Dec 13 07:04:14 crc systemd[1]: Starting Kubernetes Kubelet... Dec 13 07:04:14 crc restorecon[4689]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 07:04:14 crc 
restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 13 07:04:14 crc 
restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc 
restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc 
restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 07:04:14 
crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 
07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 
07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 13 07:04:14 crc 
restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 07:04:14 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 
07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 
07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc 
restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:15 crc restorecon[4689]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 13 07:04:15 crc restorecon[4689]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 13 07:04:15 crc kubenswrapper[4733]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 13 07:04:15 crc kubenswrapper[4733]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 13 07:04:15 crc kubenswrapper[4733]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 13 07:04:15 crc kubenswrapper[4733]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 13 07:04:15 crc kubenswrapper[4733]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 13 07:04:15 crc kubenswrapper[4733]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.476569 4733 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480785 4733 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480813 4733 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480818 4733 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480822 4733 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480828 4733 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480832 4733 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480836 4733 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480841 4733 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480845 4733 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480849 4733 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480852 4733 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480856 4733 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480861 4733 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480864 4733 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480868 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480872 4733 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480875 4733 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480878 4733 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480882 4733 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480885 4733 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 07:04:15 crc kubenswrapper[4733]: 
W1213 07:04:15.480888 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480892 4733 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480895 4733 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480899 4733 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480903 4733 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480906 4733 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480910 4733 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480913 4733 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480917 4733 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480924 4733 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480927 4733 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480931 4733 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480934 4733 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480938 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480941 4733 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480945 4733 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480948 4733 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480952 4733 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480955 4733 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480959 4733 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480963 4733 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480967 4733 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480971 4733 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480974 4733 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480978 4733 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480981 4733 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 13 07:04:15 
crc kubenswrapper[4733]: W1213 07:04:15.480984 4733 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480988 4733 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480991 4733 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480995 4733 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.480999 4733 feature_gate.go:330] unrecognized feature gate: Example Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481004 4733 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481009 4733 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481013 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481017 4733 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481022 4733 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481025 4733 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481030 4733 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481033 4733 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481037 4733 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481041 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481046 4733 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481049 4733 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481053 4733 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481057 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481060 4733 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481066 4733 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481071 4733 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481075 4733 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481079 4733 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.481083 4733 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481618 4733 flags.go:64] FLAG: --address="0.0.0.0" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481634 4733 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481643 4733 flags.go:64] FLAG: --anonymous-auth="true" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481649 4733 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481655 4733 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481659 4733 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481665 4733 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481671 4733 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481675 4733 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481679 4733 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481684 4733 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481689 4733 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481693 4733 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481698 4733 flags.go:64] FLAG: --cgroup-root="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481702 4733 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481706 4733 flags.go:64] FLAG: --client-ca-file="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481710 4733 flags.go:64] FLAG: --cloud-config="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481715 4733 flags.go:64] FLAG: --cloud-provider="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481719 4733 flags.go:64] FLAG: --cluster-dns="[]" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481725 4733 flags.go:64] FLAG: --cluster-domain="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481729 4733 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481734 4733 flags.go:64] FLAG: --config-dir="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481738 4733 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481743 4733 flags.go:64] FLAG: --container-log-max-files="5" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481761 4733 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 13 07:04:15 crc 
kubenswrapper[4733]: I1213 07:04:15.481765 4733 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481770 4733 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481774 4733 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481778 4733 flags.go:64] FLAG: --contention-profiling="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481782 4733 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481786 4733 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481791 4733 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481795 4733 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481801 4733 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481805 4733 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481809 4733 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481812 4733 flags.go:64] FLAG: --enable-load-reader="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481817 4733 flags.go:64] FLAG: --enable-server="true" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481821 4733 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481827 4733 flags.go:64] FLAG: --event-burst="100" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481831 4733 flags.go:64] FLAG: --event-qps="50" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481835 4733 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481839 4733 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481843 4733 flags.go:64] FLAG: --eviction-hard="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481849 4733 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481853 4733 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481857 4733 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481863 4733 flags.go:64] FLAG: --eviction-soft="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481867 4733 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481871 4733 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481875 4733 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481880 4733 flags.go:64] FLAG: --experimental-mounter-path="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481884 4733 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481888 4733 flags.go:64] FLAG: --fail-swap-on="true" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481892 4733 flags.go:64] FLAG: --feature-gates="" Dec 13 
07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481900 4733 flags.go:64] FLAG: --file-check-frequency="20s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481905 4733 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481909 4733 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481913 4733 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481918 4733 flags.go:64] FLAG: --healthz-port="10248" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481922 4733 flags.go:64] FLAG: --help="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481926 4733 flags.go:64] FLAG: --hostname-override="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481930 4733 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481934 4733 flags.go:64] FLAG: --http-check-frequency="20s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481939 4733 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481944 4733 flags.go:64] FLAG: --image-credential-provider-config="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481948 4733 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481952 4733 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481956 4733 flags.go:64] FLAG: --image-service-endpoint="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481960 4733 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481965 4733 flags.go:64] FLAG: --kube-api-burst="100" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481969 4733 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481974 4733 flags.go:64] FLAG: --kube-api-qps="50" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481978 4733 flags.go:64] FLAG: --kube-reserved="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481982 4733 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481985 4733 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481990 4733 flags.go:64] FLAG: --kubelet-cgroups="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481994 4733 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.481999 4733 flags.go:64] FLAG: --lock-file="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482003 4733 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482007 4733 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482011 4733 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482018 4733 flags.go:64] FLAG: --log-json-split-stream="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482023 4733 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482027 4733 flags.go:64] FLAG: --log-text-split-stream="false" Dec 13 07:04:15 crc 
kubenswrapper[4733]: I1213 07:04:15.482031 4733 flags.go:64] FLAG: --logging-format="text" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482035 4733 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482040 4733 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482044 4733 flags.go:64] FLAG: --manifest-url="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482048 4733 flags.go:64] FLAG: --manifest-url-header="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482053 4733 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482057 4733 flags.go:64] FLAG: --max-open-files="1000000" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482063 4733 flags.go:64] FLAG: --max-pods="110" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482066 4733 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482071 4733 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482075 4733 flags.go:64] FLAG: --memory-manager-policy="None" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482080 4733 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482086 4733 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482090 4733 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482095 4733 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482105 4733 flags.go:64] FLAG: --node-status-max-images="50" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482109 4733 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482113 4733 flags.go:64] FLAG: --oom-score-adj="-999" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482118 4733 flags.go:64] FLAG: --pod-cidr="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482122 4733 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482129 4733 flags.go:64] FLAG: --pod-manifest-path="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482133 4733 flags.go:64] FLAG: --pod-max-pids="-1" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482137 4733 flags.go:64] FLAG: --pods-per-core="0" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482142 4733 flags.go:64] FLAG: --port="10250" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482146 4733 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482150 4733 flags.go:64] FLAG: --provider-id="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482154 4733 flags.go:64] FLAG: --qos-reserved="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482158 4733 flags.go:64] FLAG: --read-only-port="10255" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482162 4733 flags.go:64] FLAG: --register-node="true" Dec 13 07:04:15 crc 
kubenswrapper[4733]: I1213 07:04:15.482167 4733 flags.go:64] FLAG: --register-schedulable="true" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482172 4733 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482179 4733 flags.go:64] FLAG: --registry-burst="10" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482183 4733 flags.go:64] FLAG: --registry-qps="5" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482187 4733 flags.go:64] FLAG: --reserved-cpus="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482192 4733 flags.go:64] FLAG: --reserved-memory="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482197 4733 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482202 4733 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482228 4733 flags.go:64] FLAG: --rotate-certificates="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482232 4733 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482237 4733 flags.go:64] FLAG: --runonce="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482241 4733 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482245 4733 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482250 4733 flags.go:64] FLAG: --seccomp-default="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482254 4733 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482258 4733 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482262 4733 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482267 4733 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482271 4733 flags.go:64] FLAG: --storage-driver-password="root" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482276 4733 flags.go:64] FLAG: --storage-driver-secure="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482282 4733 flags.go:64] FLAG: --storage-driver-table="stats" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482286 4733 flags.go:64] FLAG: --storage-driver-user="root" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482290 4733 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482296 4733 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482301 4733 flags.go:64] FLAG: --system-cgroups="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482305 4733 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482311 4733 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482315 4733 flags.go:64] FLAG: --tls-cert-file="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482319 4733 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482324 4733 flags.go:64] FLAG: --tls-min-version="" Dec 13 07:04:15 
crc kubenswrapper[4733]: I1213 07:04:15.482328 4733 flags.go:64] FLAG: --tls-private-key-file="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482332 4733 flags.go:64] FLAG: --topology-manager-policy="none" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482336 4733 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482340 4733 flags.go:64] FLAG: --topology-manager-scope="container" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482344 4733 flags.go:64] FLAG: --v="2" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482350 4733 flags.go:64] FLAG: --version="false" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482356 4733 flags.go:64] FLAG: --vmodule="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482361 4733 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482366 4733 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482467 4733 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482472 4733 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482477 4733 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482482 4733 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482487 4733 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482491 4733 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482495 4733 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482498 4733 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482502 4733 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482505 4733 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482509 4733 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482513 4733 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482516 4733 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482520 4733 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482523 4733 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482527 4733 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482531 4733 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482534 4733 feature_gate.go:330] unrecognized feature gate: 
VSphereControlPlaneMachineSet Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482537 4733 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482541 4733 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482544 4733 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482548 4733 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482551 4733 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482555 4733 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482558 4733 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482562 4733 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482567 4733 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482572 4733 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482576 4733 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482580 4733 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482584 4733 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482587 4733 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482591 4733 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482594 4733 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482598 4733 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482602 4733 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482605 4733 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482609 4733 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482613 4733 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482618 4733 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482622 4733 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482627 4733 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482631 4733 feature_gate.go:330] unrecognized feature gate: Example Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482634 4733 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482638 4733 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482642 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482645 4733 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482649 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482652 4733 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482656 4733 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482659 4733 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482663 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482666 4733 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482670 4733 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482673 4733 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482677 4733 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482680 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482684 4733 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482687 4733 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482691 4733 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482694 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482698 4733 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482701 4733 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482705 4733 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482708 4733 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482712 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 13 07:04:15 
crc kubenswrapper[4733]: W1213 07:04:15.482715 4733 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482719 4733 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482722 4733 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482726 4733 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.482729 4733 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.482741 4733 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.489570 4733 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.489594 4733 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489674 4733 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489686 4733 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489691 4733 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489695 4733 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489699 4733 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489704 4733 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489708 4733 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489713 4733 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489719 4733 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489723 4733 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489726 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489730 4733 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489733 4733 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489737 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489740 4733 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489744 4733 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489747 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489751 4733 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489754 4733 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489758 4733 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489761 4733 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489765 4733 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489768 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489772 4733 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489776 4733 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489779 4733 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489783 4733 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489787 4733 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489790 4733 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489794 4733 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489797 4733 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489801 4733 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489805 4733 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489808 4733 feature_gate.go:330] unrecognized feature 
gate: AzureWorkloadIdentity Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489812 4733 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489816 4733 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489820 4733 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489824 4733 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489828 4733 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489833 4733 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489837 4733 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489841 4733 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489845 4733 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489848 4733 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489852 4733 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489856 4733 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489859 4733 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489863 4733 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489866 4733 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489870 4733 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489874 4733 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489879 4733 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489885 4733 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489894 4733 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489899 4733 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489904 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489909 4733 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489915 4733 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489922 4733 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489928 4733 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489932 4733 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489937 4733 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489943 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489948 4733 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489952 4733 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489957 4733 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489963 4733 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489969 4733 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489974 4733 feature_gate.go:330] unrecognized feature gate: Example Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489979 4733 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.489987 4733 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.489993 4733 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490119 4733 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490130 4733 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490136 4733 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490141 4733 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490145 4733 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490149 4733 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490153 4733 feature_gate.go:330] unrecognized feature gate: Example Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490157 4733 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490160 4733 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490164 4733 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490168 4733 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490171 4733 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490175 4733 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490179 4733 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490182 4733 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490185 4733 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490189 4733 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490192 4733 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490197 4733 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490200 4733 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490219 4733 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490223 4733 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490226 4733 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490230 4733 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490233 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490237 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490240 4733 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490244 4733 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490248 4733 feature_gate.go:330] unrecognized feature gate: 
AWSEFSDriverVolumeMetrics Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490252 4733 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490255 4733 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490259 4733 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490264 4733 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490271 4733 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490281 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490286 4733 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490293 4733 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490297 4733 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490301 4733 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490306 4733 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490311 4733 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490316 4733 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490320 4733 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490325 4733 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490330 4733 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490336 4733 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490342 4733 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490346 4733 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490350 4733 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490354 4733 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490358 4733 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490362 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490366 4733 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490371 4733 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490375 4733 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490379 4733 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490383 4733 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490388 4733 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490392 4733 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490396 4733 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490401 4733 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490405 4733 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490409 4733 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490414 4733 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490418 4733 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490423 4733 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490427 4733 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490432 4733 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490436 4733 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490441 4733 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.490451 4733 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.490457 4733 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.490731 4733 server.go:940] "Client rotation is on, will bootstrap in background" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.493573 4733 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.493664 4733 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.494183 4733 server.go:997] "Starting client certificate rotation" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.494231 4733 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.494361 4733 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-17 02:14:00.93169719 +0000 UTC Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.494439 4733 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 91h9m45.437260934s for next certificate rotation Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.499135 4733 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.500463 4733 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.507958 4733 log.go:25] "Validated CRI v1 runtime API" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.521218 4733 log.go:25] "Validated CRI v1 image API" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.523939 4733 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.526108 4733 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-13-07-00-27-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.526137 4733 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.539820 4733 manager.go:217] Machine: {Timestamp:2025-12-13 07:04:15.538762626 +0000 UTC m=+0.149993129 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:6c562c68-0dae-4032-a6e3-a1edf517d11e BootID:61aa24df-270f-41df-a3ba-89af404afd63 Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 
DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:fd:18:44 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:fd:18:44 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:5a:aa:dd Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:b5:43:a8 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:c3:4f:3d Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:22:a9:45 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:2a:6e:91:2d:96:88 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:2e:20:bd:f0:0c:f7 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 
BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.540018 4733 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.540143 4733 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.540573 4733 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.540754 4733 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.540786 4733 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.540985 4733 topology_manager.go:138] "Creating topology manager with none policy" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.540993 4733 container_manager_linux.go:303] "Creating device plugin manager" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.541168 4733 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.541199 4733 server.go:66] "Creating device plugin registration server" version="v1beta1" 
socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.541540 4733 state_mem.go:36] "Initialized new in-memory state store" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.541625 4733 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.542252 4733 kubelet.go:418] "Attempting to sync node with API server" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.542271 4733 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.542291 4733 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.542303 4733 kubelet.go:324] "Adding apiserver pod source" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.542313 4733 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.544149 4733 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.544562 4733 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.545175 4733 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.545128 4733 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.545190 4733 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.545553 4733 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.545474 4733 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.545863 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.545933 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.545984 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.546054 4733 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/host-path" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.546120 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.546173 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.546252 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.546314 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.546364 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.546411 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.546477 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.546527 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.546728 4733 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.547159 4733 server.go:1280] "Started kubelet" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.548527 4733 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.548728 4733 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 13 07:04:15 crc systemd[1]: Started Kubernetes Kubelet. 
Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.550144 4733 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.41:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1880b471f9f54f7c default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 07:04:15.547142012 +0000 UTC m=+0.158372495,LastTimestamp:2025-12-13 07:04:15.547142012 +0000 UTC m=+0.158372495,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.550898 4733 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.551591 4733 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.553099 4733 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.553137 4733 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.553383 4733 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 06:36:29.020500023 +0000 UTC Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.553540 4733 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.553561 4733 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.553592 4733 server.go:460] "Adding debug handlers to kubelet server" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.553635 4733 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.553589 4733 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.554361 4733 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="200ms" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.555063 4733 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.555088 4733 factory.go:55] Registering systemd factory Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.555098 4733 factory.go:221] Registration of the systemd container factory successfully Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.555234 4733 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.555279 4733 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.555347 4733 factory.go:153] Registering CRI-O factory Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.555377 4733 factory.go:221] Registration of the crio container factory successfully Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.555398 4733 factory.go:103] Registering Raw factory Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.555413 4733 manager.go:1196] Started watching for new ooms in manager Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.556295 4733 manager.go:319] Starting recovery of all containers Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.563993 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.564044 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.564056 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.564067 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.564077 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.564086 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.564094 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.564104 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.564114 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.564123 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.564134 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.564149 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565583 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565603 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565613 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565627 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565637 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565648 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565658 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565667 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565676 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565686 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565696 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565706 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565715 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565725 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565737 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565747 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565766 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565775 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565786 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565797 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565806 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565816 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565852 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565862 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565872 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565880 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565890 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565900 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565909 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565919 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565928 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.565939 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.566835 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.566857 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.566875 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.566887 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.566902 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.566913 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572026 4733 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572087 4733 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572121 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572153 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572176 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572260 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572286 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572310 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572323 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572339 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572353 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572366 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572384 4733 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572397 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572414 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572429 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572444 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572461 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572475 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572488 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572504 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572543 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572560 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572576 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572591 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572612 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572625 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572645 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572660 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572673 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572691 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572706 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572725 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572739 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572752 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572769 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572784 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572803 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572819 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572873 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.572899 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573403 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573451 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573464 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573475 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573488 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573499 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573509 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573520 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573530 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573540 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573550 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573560 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573570 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573580 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573610 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573628 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573641 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573652 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573664 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573676 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573685 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573696 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573707 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573718 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573729 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573738 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573748 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573760 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573770 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573781 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573791 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573801 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573812 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573823 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573833 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573842 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573892 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573902 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573913 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573922 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573933 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573942 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573951 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573961 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573973 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573983 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.573992 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574001 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574012 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574021 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574032 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574042 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574053 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574062 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574072 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574080 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574090 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574101 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574111 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574120 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574132 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574143 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574153 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574163 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574173 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574181 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574191 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574200 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574241 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574252 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574263 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574272 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574282 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574293 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574303 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574314 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574325 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574335 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574345 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574355 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574365 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574374 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574384 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574396 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574406 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574415 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574424 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574435 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574445 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574480 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574491 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574501 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574510 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574521 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574531 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574541 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574552 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574561 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574571 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574584 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574594 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574604 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574614 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574624 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574635 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574646 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574657 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574667 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574677 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574689 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574699 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574709 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574719 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574730 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574740 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574753 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574764 4733 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574773 4733 reconstruct.go:97] "Volume reconstruction finished" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.574781 4733 reconciler.go:26] "Reconciler: start to sync state" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.576803 4733 manager.go:324] Recovery completed Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.586814 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.588141 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.588200 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.588228 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.589165 4733 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.589181 4733 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.589202 4733 state_mem.go:36] "Initialized new in-memory state store" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.591724 4733 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.593439 4733 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.593470 4733 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.593488 4733 kubelet.go:2335] "Starting kubelet main sync loop" Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.593531 4733 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 13 07:04:15 crc kubenswrapper[4733]: W1213 07:04:15.594916 4733 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.594976 4733 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.598911 4733 policy_none.go:49] "None policy: Start" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.599649 4733 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.599680 4733 state_mem.go:35] "Initializing new in-memory state store" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.646464 4733 manager.go:334] "Starting Device Plugin manager" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.646515 4733 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.646527 4733 server.go:79] "Starting device plugin registration server" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.646913 4733 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.646927 4733 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.647271 4733 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.647346 4733 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.647352 4733 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.653128 4733 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.694735 4733 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.694864 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 
07:04:15.696287 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.696327 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.696337 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.696503 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.696749 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.696792 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.697771 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.697800 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.697803 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.697816 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.697868 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.697826 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.698015 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.698190 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.698268 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.698940 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.698969 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.698982 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.699074 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.699119 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.699141 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.699150 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.699270 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.699342 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.699969 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.699994 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700006 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700058 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700077 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700087 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700105 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700349 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700405 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700751 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700783 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700796 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700944 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.700979 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.703198 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.703241 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.704034 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.704088 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.704102 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.704063 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.748165 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.749547 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.749610 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.749624 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.749663 4733 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.750293 4733 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.41:6443: connect: connection refused" node="crc" Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.755426 4733 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: 
connection refused" interval="400ms" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777158 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777223 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777260 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777286 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777308 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777329 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777350 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777372 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777396 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777491 4733 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777522 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777603 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777671 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777703 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.777733 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.878893 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.878968 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.878984 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879004 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879020 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879036 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879050 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879065 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879084 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879104 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879119 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879134 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879150 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879165 4733 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879180 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879191 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879283 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879377 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879400 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879235 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879437 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879444 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879487 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879504 4733 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879540 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879585 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879636 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879719 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879721 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.879848 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.950937 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.952588 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.952753 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.952782 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:15 crc kubenswrapper[4733]: I1213 07:04:15.952938 4733 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 07:04:15 crc kubenswrapper[4733]: E1213 07:04:15.954022 4733 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.41:6443: 
connect: connection refused" node="crc" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.041328 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.045305 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 13 07:04:16 crc kubenswrapper[4733]: W1213 07:04:16.069941 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-870528b26b4deedc6247f67889a39ce16e708101081c7626634a9944a93b1dfe WatchSource:0}: Error finding container 870528b26b4deedc6247f67889a39ce16e708101081c7626634a9944a93b1dfe: Status 404 returned error can't find the container with id 870528b26b4deedc6247f67889a39ce16e708101081c7626634a9944a93b1dfe Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.070652 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:16 crc kubenswrapper[4733]: W1213 07:04:16.087813 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-cc9a03ba1aa7837f5bd62a4249eac7d4a0512ede72bd1db12b3bfa26395ae8eb WatchSource:0}: Error finding container cc9a03ba1aa7837f5bd62a4249eac7d4a0512ede72bd1db12b3bfa26395ae8eb: Status 404 returned error can't find the container with id cc9a03ba1aa7837f5bd62a4249eac7d4a0512ede72bd1db12b3bfa26395ae8eb Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.097199 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.103386 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 07:04:16 crc kubenswrapper[4733]: W1213 07:04:16.116673 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-f8aa3a3e906db4aa0f478118aa5d8bca8b06054a24c52e050595e279db2a29f1 WatchSource:0}: Error finding container f8aa3a3e906db4aa0f478118aa5d8bca8b06054a24c52e050595e279db2a29f1: Status 404 returned error can't find the container with id f8aa3a3e906db4aa0f478118aa5d8bca8b06054a24c52e050595e279db2a29f1 Dec 13 07:04:16 crc kubenswrapper[4733]: E1213 07:04:16.157029 4733 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="800ms" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.355044 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.356020 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.356058 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.356068 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.356088 4733 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 07:04:16 crc kubenswrapper[4733]: E1213 07:04:16.356482 4733 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.41:6443: connect: connection refused" node="crc" Dec 13 07:04:16 crc kubenswrapper[4733]: W1213 07:04:16.536550 4733 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 13 07:04:16 crc kubenswrapper[4733]: E1213 07:04:16.536992 4733 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.549613 4733 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.553796 4733 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 18:46:41.796438097 +0000 UTC Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.553853 4733 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 731h42m25.242588671s for next certificate rotation Dec 13 07:04:16 crc 
kubenswrapper[4733]: I1213 07:04:16.597760 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d"} Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.597869 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f8aa3a3e906db4aa0f478118aa5d8bca8b06054a24c52e050595e279db2a29f1"} Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.599577 4733 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db" exitCode=0 Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.599640 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db"} Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.599672 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cc9a03ba1aa7837f5bd62a4249eac7d4a0512ede72bd1db12b3bfa26395ae8eb"} Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.599757 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.600766 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.600794 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.600806 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.601158 4733 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e" exitCode=0 Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.601248 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e"} Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.601301 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"df1c7bc3b98c416d77eb244ab567e3ccf0d15f5450f82a9a732de74689440516"} Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.601480 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.602174 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.602203 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.602228 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.602466 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.602603 4733 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b" exitCode=0 Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.602648 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b"} Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.602664 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"870528b26b4deedc6247f67889a39ce16e708101081c7626634a9944a93b1dfe"} Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.602710 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.603250 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.603272 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.603280 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.603395 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.603415 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.603425 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.604583 4733 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975" exitCode=0 Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.604648 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975"} Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.604700 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"55bffb818a95edd252edafcb19d3bc94aa46af4cae249732d660fa947a0bd9c4"} Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.604807 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:16 
crc kubenswrapper[4733]: I1213 07:04:16.606491 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.606516 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:16 crc kubenswrapper[4733]: I1213 07:04:16.606744 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:16 crc kubenswrapper[4733]: W1213 07:04:16.695301 4733 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 13 07:04:16 crc kubenswrapper[4733]: E1213 07:04:16.695384 4733 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 13 07:04:16 crc kubenswrapper[4733]: W1213 07:04:16.774875 4733 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 13 07:04:16 crc kubenswrapper[4733]: E1213 07:04:16.774964 4733 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 13 07:04:16 crc kubenswrapper[4733]: W1213 07:04:16.869624 4733 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.41:6443: connect: connection refused Dec 13 07:04:16 crc kubenswrapper[4733]: E1213 07:04:16.869862 4733 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.41:6443: connect: connection refused" logger="UnhandledError" Dec 13 07:04:16 crc kubenswrapper[4733]: E1213 07:04:16.958279 4733 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="1.6s" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.156896 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.158635 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.158682 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 
07:04:17.158693 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.158721 4733 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.610487 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.610546 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.610557 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.610568 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.611839 4733 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351" exitCode=0 Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.611904 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.612050 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.612833 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.612861 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.612871 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.622184 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"4f82045ac8c1cab00b6d1480e0b62bf93bc7d879cf9ce653bae848f7aa326f0e"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.622339 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.623592 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.623630 4733 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.623641 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.625940 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3a932f4e3e6fe5266401c35693e55baa63300c701741ea3d2848a33c9a7f762b"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.626084 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"322f172bd0b9c19683a7af586a7f33610a7e6126d82c73056af895601513ca0f"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.626188 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a5e6fb86d3842b9b963b5e458dab0eaf384c9d125d476a522b7510eff6916446"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.626253 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.627516 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.627571 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.627581 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.635969 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.636022 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.636041 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c"} Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.636186 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.638883 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.638922 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:17 crc kubenswrapper[4733]: I1213 07:04:17.638931 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" 
Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.041157 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.514835 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.640308 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294"} Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.640422 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.641103 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.641129 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.641141 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.643198 4733 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde" exitCode=0 Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.643232 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde"} Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.643304 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.643334 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.643370 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.644190 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.644311 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.644327 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.644260 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.644382 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.644393 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.644428 4733 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.644446 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:18 crc kubenswrapper[4733]: I1213 07:04:18.644454 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.161433 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.652016 4733 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.652056 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.652054 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af"} Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.652036 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.652099 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1"} Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.652111 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d"} Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.652122 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1"} Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.652133 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926"} Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.652063 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.652300 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653482 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653493 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653563 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653529 4733 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653583 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653603 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653617 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653626 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653666 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653797 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653821 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:19 crc kubenswrapper[4733]: I1213 07:04:19.653833 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:20 crc kubenswrapper[4733]: I1213 07:04:20.654191 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:20 crc kubenswrapper[4733]: I1213 07:04:20.655643 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:20 crc kubenswrapper[4733]: I1213 07:04:20.655721 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:20 crc kubenswrapper[4733]: I1213 07:04:20.655748 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.162468 4733 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.162592 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.162636 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.162867 4733 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.162936 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 
07:04:22.164494 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.164526 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.164537 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.871946 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.872104 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.873142 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.873177 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:22 crc kubenswrapper[4733]: I1213 07:04:22.873187 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.046369 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.046600 4733 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.046656 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.048353 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.048405 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.048416 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.447052 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.447286 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.448578 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.448620 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.448630 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.852437 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.853171 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:23 crc 
kubenswrapper[4733]: I1213 07:04:23.854998 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.855053 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:23 crc kubenswrapper[4733]: I1213 07:04:23.855069 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.527923 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.528302 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.530463 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.530571 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.530596 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.615179 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.615504 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.617460 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.617511 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.617523 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.620618 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:25 crc kubenswrapper[4733]: E1213 07:04:25.653415 4733 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.846513 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.847850 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.847907 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:25 crc kubenswrapper[4733]: I1213 07:04:25.847922 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:27 crc kubenswrapper[4733]: E1213 07:04:27.159476 4733 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake 
timeout" node="crc" Dec 13 07:04:27 crc kubenswrapper[4733]: I1213 07:04:27.550518 4733 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 13 07:04:28 crc kubenswrapper[4733]: I1213 07:04:28.530125 4733 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 13 07:04:28 crc kubenswrapper[4733]: I1213 07:04:28.530255 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 13 07:04:28 crc kubenswrapper[4733]: I1213 07:04:28.542984 4733 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 13 07:04:28 crc kubenswrapper[4733]: I1213 07:04:28.543058 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 13 07:04:28 crc kubenswrapper[4733]: I1213 07:04:28.760446 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:28 crc kubenswrapper[4733]: I1213 07:04:28.761950 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:28 crc kubenswrapper[4733]: I1213 07:04:28.761994 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:28 crc kubenswrapper[4733]: I1213 07:04:28.762007 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:28 crc kubenswrapper[4733]: I1213 07:04:28.762030 4733 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.162668 4733 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.162794 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 13 07:04:32 
crc kubenswrapper[4733]: I1213 07:04:32.170032 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.170339 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.172680 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.172741 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.172757 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.178145 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.868950 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.870776 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.870828 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.870848 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.878283 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.878515 4733 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.880382 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.880427 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:32 crc kubenswrapper[4733]: I1213 07:04:32.880441 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.534047 4733 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.535972 4733 trace.go:236] Trace[351370227]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 07:04:18.990) (total time: 14545ms): Dec 13 07:04:33 crc kubenswrapper[4733]: Trace[351370227]: ---"Objects listed" error: 14545ms (07:04:33.535) Dec 13 07:04:33 crc kubenswrapper[4733]: Trace[351370227]: [14.545166975s] [14.545166975s] END Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.536296 4733 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 
07:04:33.538930 4733 trace.go:236] Trace[1856170019]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 07:04:18.703) (total time: 14835ms): Dec 13 07:04:33 crc kubenswrapper[4733]: Trace[1856170019]: ---"Objects listed" error: 14835ms (07:04:33.538) Dec 13 07:04:33 crc kubenswrapper[4733]: Trace[1856170019]: [14.83549644s] [14.83549644s] END Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.538986 4733 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.539030 4733 trace.go:236] Trace[755488302]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 07:04:19.079) (total time: 14459ms): Dec 13 07:04:33 crc kubenswrapper[4733]: Trace[755488302]: ---"Objects listed" error: 14459ms (07:04:33.538) Dec 13 07:04:33 crc kubenswrapper[4733]: Trace[755488302]: [14.459738758s] [14.459738758s] END Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.539059 4733 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.541825 4733 trace.go:236] Trace[1530256420]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (13-Dec-2025 07:04:19.492) (total time: 14048ms): Dec 13 07:04:33 crc kubenswrapper[4733]: Trace[1530256420]: ---"Objects listed" error: 14048ms (07:04:33.541) Dec 13 07:04:33 crc kubenswrapper[4733]: Trace[1530256420]: [14.048877291s] [14.048877291s] END Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.541852 4733 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.542178 4733 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.575186 4733 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40312->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.575184 4733 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40308->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.575271 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40312->192.168.126.11:17697: read: connection reset by peer" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.575332 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40308->192.168.126.11:17697: read: connection reset by peer" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.575659 4733 
patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.575718 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.787024 4733 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.787232 4733 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.788864 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.788904 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.788914 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.788935 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.788947 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:33Z","lastTransitionTime":"2025-12-13T07:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.809019 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.813956 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.814007 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.814017 4733 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.814038 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.814049 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:33Z","lastTransitionTime":"2025-12-13T07:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.830550 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.834857 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.834910 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.834923 4733 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.834945 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.834963 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:33Z","lastTransitionTime":"2025-12-13T07:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.844004 4733 apiserver.go:52] "Watching apiserver" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.846662 4733 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.846891 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.847237 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.847336 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.847418 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.847389 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.847466 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.847510 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.847278 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.847735 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.847939 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.850482 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.851191 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.851532 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.852076 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.852232 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.853639 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.853741 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.853940 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.854914 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.857614 4733 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.861647 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.868288 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.868339 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.868352 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.868370 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.868382 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:33Z","lastTransitionTime":"2025-12-13T07:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.873565 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.877723 4733 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294" exitCode=255 Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.877789 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294"} Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.880632 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.881629 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.884030 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.884069 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.884079 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.884099 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.884110 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:33Z","lastTransitionTime":"2025-12-13T07:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.885790 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.889857 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.890131 4733 scope.go:117] "RemoveContainer" containerID="e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.896248 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.896338 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.896548 4733 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.897876 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.897916 4733 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.897934 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.897956 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.897972 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:33Z","lastTransitionTime":"2025-12-13T07:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.899453 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.909919 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.923020 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.937408 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944050 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944091 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944133 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944149 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944165 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944180 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944196 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 
13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944225 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944254 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944268 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944285 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944302 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944328 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944388 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944408 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944424 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944451 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944475 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944502 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944488 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944525 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944544 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944563 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944606 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944624 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944640 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944657 4733 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944674 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944689 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944705 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944721 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944737 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944752 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944767 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944783 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944803 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 13 07:04:33 
crc kubenswrapper[4733]: I1213 07:04:33.944821 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944838 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944950 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944969 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.944985 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945000 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945014 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945035 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945049 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945065 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod 
\"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945080 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945094 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945110 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945126 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945140 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945155 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945170 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945185 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945201 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945234 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod 
\"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945250 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945265 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945282 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945299 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945314 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945332 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945348 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945362 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945383 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945399 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod 
\"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945404 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945419 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945412 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945617 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.946262 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.946409 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.946648 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.946746 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.946765 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.946856 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.947242 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.947254 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.947264 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.947338 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.947517 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.947528 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:04:34.447508108 +0000 UTC m=+19.058738591 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.947671 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.947797 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.947866 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.947877 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.947717 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948006 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948147 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948158 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948305 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948531 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.945414 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948666 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948694 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948581 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948609 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948717 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948723 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948793 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948793 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948887 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948918 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948938 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948948 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.949165 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.949172 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.949244 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.949419 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.949623 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.949670 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.949775 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.950070 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.950109 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.950275 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.950352 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948973 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.948642 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.952402 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.952481 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.952487 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.952769 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953042 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953048 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953074 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953192 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953391 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953482 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953003 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953610 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953738 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953740 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953813 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.953842 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.954031 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.955927 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.955971 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.954172 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956002 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.954187 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.954192 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956029 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.954235 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956133 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.954398 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956059 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.955459 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.955508 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.955529 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956382 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956411 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956432 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956449 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956506 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956528 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956544 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956577 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956598 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956641 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956667 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956694 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956713 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956730 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956749 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956768 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956801 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956863 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956896 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956918 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod 
\"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956948 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956988 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957008 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957023 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957122 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957147 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957166 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.955650 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.955865 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.955960 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.956848 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957179 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957252 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957323 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957352 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957382 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957417 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957440 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 
07:04:33.957465 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957491 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957513 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957712 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.957809 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958117 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958404 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958492 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958615 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958657 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958695 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958718 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958737 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958687 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958757 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958780 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958806 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958826 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958903 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958904 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.958901 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959039 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959026 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959193 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959319 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959340 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959528 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959581 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959640 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959778 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959874 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959870 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959989 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.959998 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.960271 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.960358 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.960464 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.960391 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). 
InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.960510 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.960731 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.960734 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.960798 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.960841 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.960869 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.961319 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.961300 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.961322 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.961575 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.961671 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.961868 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.961718 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.961896 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.961933 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.962017 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.962053 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.962106 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.962430 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.962448 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.962476 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.962558 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.962675 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.962714 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.964566 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.964791 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.964861 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.964887 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.964914 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.964939 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.964961 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.964981 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965005 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965026 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965053 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965075 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965087 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965097 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965093 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965123 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965128 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965148 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965164 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965171 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965194 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965236 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965257 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965279 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965304 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965325 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965347 4733 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965368 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965390 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965409 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965434 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965437 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965454 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965477 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965500 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965522 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965563 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965590 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965614 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965639 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965661 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965685 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965705 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965728 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965751 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965773 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965798 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965819 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965843 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965863 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965883 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965905 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965910 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965929 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.965953 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966038 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966063 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966083 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966105 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966128 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966180 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 
07:04:33.966232 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966257 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966277 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966308 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966329 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966351 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966377 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966396 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966450 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966486 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 13 07:04:33 crc 
kubenswrapper[4733]: I1213 07:04:33.966510 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966543 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966565 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966631 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966663 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966690 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966720 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966744 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966767 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 07:04:33 crc 
kubenswrapper[4733]: I1213 07:04:33.966794 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966818 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966843 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966866 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966889 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966873 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.966911 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.967053 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.967160 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.967189 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.967272 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.967370 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.967512 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.967673 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.967747 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.967802 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.967874 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.967995 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968019 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968091 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968621 4733 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968643 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968654 4733 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968666 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968677 4733 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968687 4733 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968698 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968708 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968718 4733 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968729 4733 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968739 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968749 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968759 4733 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 
13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968768 4733 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968777 4733 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968788 4733 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968797 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968807 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968816 4733 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968826 4733 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968835 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968843 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968852 4733 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968862 4733 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968874 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968883 4733 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 
07:04:33.968893 4733 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968902 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968911 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968922 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968931 4733 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968942 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968951 4733 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968961 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968969 4733 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968979 4733 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968988 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.968996 4733 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969005 4733 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969014 4733 reconciler_common.go:293] "Volume 
detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969023 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969032 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969042 4733 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969051 4733 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969060 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969069 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969078 4733 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969087 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969096 4733 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969106 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969118 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969128 4733 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969138 4733 reconciler_common.go:293] "Volume 
detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969148 4733 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969159 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969173 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969189 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969203 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969236 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969251 4733 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969264 4733 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969277 4733 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969287 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969296 4733 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969307 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969319 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: 
\"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969334 4733 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969344 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969354 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969364 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969375 4733 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969386 4733 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969396 4733 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969406 4733 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969415 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969424 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969434 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969443 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969453 
4733 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969462 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969473 4733 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969482 4733 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969493 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969502 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969512 4733 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969520 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969530 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969540 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969550 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969559 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969569 4733 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc 
kubenswrapper[4733]: I1213 07:04:33.969578 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969588 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969597 4733 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969606 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969615 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969633 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969642 4733 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969652 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969662 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969672 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969681 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969690 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969700 4733 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc 
kubenswrapper[4733]: I1213 07:04:33.969712 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969721 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969730 4733 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969739 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969748 4733 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969758 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969767 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969783 4733 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969801 4733 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969811 4733 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969819 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969827 4733 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969836 4733 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 13 
07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969845 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969853 4733 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969863 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969880 4733 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969888 4733 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969897 4733 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969906 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969914 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969927 4733 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.969961 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.969989 4733 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.970041 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:34.470024715 +0000 UTC m=+19.081255198 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.970610 4733 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.970757 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:34.470737946 +0000 UTC m=+19.081968429 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.970758 4733 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.970854 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.972409 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.972863 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.973135 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.973380 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.975279 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.973467 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.973568 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.973767 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.973919 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.973943 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.974329 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.974536 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.974901 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.975152 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.975434 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.975537 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.975600 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.972513 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.975662 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.977786 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.978435 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.978445 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.979187 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.979817 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.979851 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.979866 4733 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.979971 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:34.479949906 +0000 UTC m=+19.091180559 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.980125 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.980292 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.980365 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.980654 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.980674 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.980731 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.984958 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.985056 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.985126 4733 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:33 crc kubenswrapper[4733]: E1213 07:04:33.985317 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:34.485295817 +0000 UTC m=+19.096526290 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.987123 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.989644 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.991075 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.991465 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.991950 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.993473 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.993799 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.993833 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.994190 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.994327 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.994473 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.994812 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.994998 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.994999 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.995321 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.995560 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.995610 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.996062 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.996069 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.996444 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.996618 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.996728 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.996832 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.997419 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:33 crc kubenswrapper[4733]: I1213 07:04:33.999673 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.001710 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.004571 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.005859 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.007639 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.007954 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.008612 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.012029 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.012069 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.012080 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.012096 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.012105 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:34Z","lastTransitionTime":"2025-12-13T07:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.013916 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.014590 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.019334 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.026838 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.033944 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.035719 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.040054 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.045639 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.048099 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.051122 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.062993 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13
T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.070976 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071025 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071059 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071071 4733 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071080 4733 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071090 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071099 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071108 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071116 4733 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071124 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071133 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071141 4733 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071149 4733 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071158 4733 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071166 4733 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071174 4733 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 
crc kubenswrapper[4733]: I1213 07:04:34.071182 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071194 4733 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071202 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071226 4733 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071235 4733 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071243 4733 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071252 4733 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071260 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071271 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071279 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071286 4733 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071294 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071302 4733 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 
07:04:34.071309 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071317 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071325 4733 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071333 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071341 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071348 4733 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071359 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071368 4733 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071379 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071388 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071398 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071408 4733 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071419 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071431 4733 
reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071442 4733 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071440 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071453 4733 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071495 4733 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071496 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071508 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071520 4733 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071534 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071544 4733 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071554 4733 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071567 4733 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071578 4733 reconciler_common.go:293] "Volume detached for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071587 4733 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071597 4733 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071608 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071618 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071629 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071639 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071652 4733 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071662 4733 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071674 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071687 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071698 4733 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071709 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071723 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: 
\"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071737 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071762 4733 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071771 4733 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.071780 4733 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.114370 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.114401 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.114408 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.114421 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.114429 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:34Z","lastTransitionTime":"2025-12-13T07:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.164585 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.173456 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 13 07:04:34 crc kubenswrapper[4733]: W1213 07:04:34.180306 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-1420751d21a531a28488e9af239e61022ace1df20c5f5f745d33b6b3a32da49e WatchSource:0}: Error finding container 1420751d21a531a28488e9af239e61022ace1df20c5f5f745d33b6b3a32da49e: Status 404 returned error can't find the container with id 1420751d21a531a28488e9af239e61022ace1df20c5f5f745d33b6b3a32da49e Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.180567 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 13 07:04:34 crc kubenswrapper[4733]: W1213 07:04:34.183567 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-4a15b61e789084ab49b0944fa3a309ca417a84d2d56236aa9eaaa22f662a7b2e WatchSource:0}: Error finding container 4a15b61e789084ab49b0944fa3a309ca417a84d2d56236aa9eaaa22f662a7b2e: Status 404 returned error can't find the container with id 4a15b61e789084ab49b0944fa3a309ca417a84d2d56236aa9eaaa22f662a7b2e Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.216439 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.216474 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.216482 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.216497 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.216508 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:34Z","lastTransitionTime":"2025-12-13T07:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.319070 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.319131 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.319149 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.319176 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.319195 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:34Z","lastTransitionTime":"2025-12-13T07:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.421912 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.421992 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.422003 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.422018 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.422029 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:34Z","lastTransitionTime":"2025-12-13T07:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.474406 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.474562 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.474611 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:04:35.474586074 +0000 UTC m=+20.085816567 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.474650 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.474677 4733 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.474771 4733 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.474775 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:35.474757509 +0000 UTC m=+20.085987992 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.474864 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:35.47482777 +0000 UTC m=+20.086058313 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.525534 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.525619 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.525672 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.525696 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.525710 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:34Z","lastTransitionTime":"2025-12-13T07:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.575484 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.575548 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.575731 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.575759 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.575773 4733 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.575815 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.575877 4733 
projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.575902 4733 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.575840 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:35.575817688 +0000 UTC m=+20.187048181 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:34 crc kubenswrapper[4733]: E1213 07:04:34.576017 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:35.575983203 +0000 UTC m=+20.187213726 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.628657 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.628726 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.628738 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.628754 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.628763 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:34Z","lastTransitionTime":"2025-12-13T07:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.731069 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.731107 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.731116 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.731150 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.731160 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:34Z","lastTransitionTime":"2025-12-13T07:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.834038 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.834095 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.834108 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.834126 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.834141 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:34Z","lastTransitionTime":"2025-12-13T07:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.880642 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1420751d21a531a28488e9af239e61022ace1df20c5f5f745d33b6b3a32da49e"} Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.881692 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4a15b61e789084ab49b0944fa3a309ca417a84d2d56236aa9eaaa22f662a7b2e"} Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.936885 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.936931 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.936941 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.936960 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:34 crc kubenswrapper[4733]: I1213 07:04:34.936974 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:34Z","lastTransitionTime":"2025-12-13T07:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.039576 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.039621 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.039633 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.039649 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.039661 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:35Z","lastTransitionTime":"2025-12-13T07:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:35 crc kubenswrapper[4733]: W1213 07:04:35.047817 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-2520f86038a62304d505edd8d7ef076e08cb17753659a78e72f9ddf531e93263 WatchSource:0}: Error finding container 2520f86038a62304d505edd8d7ef076e08cb17753659a78e72f9ddf531e93263: Status 404 returned error can't find the container with id 2520f86038a62304d505edd8d7ef076e08cb17753659a78e72f9ddf531e93263 Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.142687 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.142736 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.142747 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.142766 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.142778 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:35Z","lastTransitionTime":"2025-12-13T07:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.245899 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.245956 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.245970 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.245994 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.246009 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:35Z","lastTransitionTime":"2025-12-13T07:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.348249 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.348286 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.348294 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.348308 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.348317 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:35Z","lastTransitionTime":"2025-12-13T07:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.451424 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.451472 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.451485 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.451502 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.451517 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:35Z","lastTransitionTime":"2025-12-13T07:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.483449 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.483529 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.483563 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-13 07:04:37.483542444 +0000 UTC m=+22.094772937 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.483595 4733 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.483635 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:37.483627736 +0000 UTC m=+22.094858219 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.483671 4733 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.483594 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.483705 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:37.483696178 +0000 UTC m=+22.094926661 (durationBeforeRetry 2s). 
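
[editor's note] The durationBeforeRetry values above show the kubelet backing off on the same volume operations: the attempts that failed at 07:04:34 were scheduled for retry after 1s (m=+20), and the failures at 07:04:35 push the next retry out to 2s (m=+22). Below is a minimal Go sketch of that doubling-delay pattern, purely illustrative and hypothetical, not the kubelet's actual nestedpendingoperations/volume-manager code; the 2-minute cap is an assumption, not something recorded in this log.

    package main

    // Illustrative sketch of the doubling retry delay visible in the
    // durationBeforeRetry fields above (1s, then 2s). Hypothetical code,
    // not the kubelet's actual backoff implementation.

    import (
        "fmt"
        "time"
    )

    // nextDelay returns the wait before the next retry: 1s after the first
    // failure, doubling on each subsequent failure up to maxDelay.
    func nextDelay(prev, maxDelay time.Duration) time.Duration {
        if prev == 0 {
            return time.Second // first failed attempt is retried after 1s, as in the log
        }
        if next := prev * 2; next < maxDelay {
            return next
        }
        return maxDelay
    }

    func main() {
        var d time.Duration
        for attempt := 1; attempt <= 5; attempt++ {
            d = nextDelay(d, 2*time.Minute) // cap chosen for the sketch only
            fmt.Printf("attempt %d: retry after %s\n", attempt, d)
        }
    }

Running the sketch prints 1s, 2s, 4s, 8s, 16s; only the first two steps (1s, 2s) are actually observed in the log for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8, nginx-conf and networking-console-plugin-cert.
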
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.554145 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.554197 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.554235 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.554258 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.554273 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:35Z","lastTransitionTime":"2025-12-13T07:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.584624 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.584676 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.584783 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.584799 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.584810 4733 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.584851 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-12-13 07:04:37.584838321 +0000 UTC m=+22.196068804 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.585096 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.585113 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.585122 4733 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.585157 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:37.58514846 +0000 UTC m=+22.196378943 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.594523 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.594527 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.594644 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.594774 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
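
[editor's note] Every sandbox refusal and "network is not ready" sync error above traces back to the same condition: no CNI network configuration exists yet in /etc/kubernetes/cni/net.d/, so the container runtime reports NetworkReady=false and the kubelet will only run host-network pods. On this cluster the openshift-network-operator and OVN-Kubernetes components that are just starting in this log are the ones expected to write that configuration. For illustration only, a generic CNI conflist of the kind such a directory eventually contains might look like the following (hypothetical file name and values, using the standard reference bridge/host-local plugins, not what OVN-Kubernetes actually installs):

    /etc/kubernetes/cni/net.d/10-example.conflist (hypothetical):
    {
      "cniVersion": "0.4.0",
      "name": "example-net",
      "plugins": [
        {
          "type": "bridge",
          "bridge": "cni0",
          "isGateway": true,
          "ipMasq": true,
          "ipam": {
            "type": "host-local",
            "subnet": "10.88.0.0/16",
            "routes": [{ "dst": "0.0.0.0/0" }]
          }
        }
      ]
    }

Once a valid configuration appears in that directory, the runtime's NetworkReady condition should flip and the pending non-host-network sandboxes (network-check-source, network-check-target, networking-console-plugin) can be created.
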
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.594826 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:35 crc kubenswrapper[4733]: E1213 07:04:35.594991 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.603123 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.603949 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.604746 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.605502 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.606060 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.606558 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.607094 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.607605 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.608184 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.608682 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.609133 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.609748 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.610260 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.610791 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.612086 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.612844 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.613786 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.613720 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13
T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.614306 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.614996 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.615838 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.616390 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.617080 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.617535 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.618145 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.618569 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.619164 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.619967 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.621102 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.621932 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.622527 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.623160 4733 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.623305 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.624821 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.625607 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.626069 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.627384 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.628131 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.628791 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.629762 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.630257 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.631088 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.631649 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.633122 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.634424 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.635060 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.635904 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.636507 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.637491 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.638229 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 13 07:04:35 crc 
kubenswrapper[4733]: I1213 07:04:35.639083 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.639553 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.640042 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.640948 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.641589 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.642481 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.656334 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.656551 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.656617 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.656630 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.656640 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:35Z","lastTransitionTime":"2025-12-13T07:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.759073 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.759126 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.759141 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.759161 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.759173 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:35Z","lastTransitionTime":"2025-12-13T07:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.861553 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.861600 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.861608 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.861622 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.861632 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:35Z","lastTransitionTime":"2025-12-13T07:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.885603 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"2520f86038a62304d505edd8d7ef076e08cb17753659a78e72f9ddf531e93263"} Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.963871 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.963904 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.963916 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.963932 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:35 crc kubenswrapper[4733]: I1213 07:04:35.963942 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:35Z","lastTransitionTime":"2025-12-13T07:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.067065 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.067164 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.067195 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.067270 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.067296 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:36Z","lastTransitionTime":"2025-12-13T07:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.170693 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.170791 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.170815 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.170848 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.170875 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:36Z","lastTransitionTime":"2025-12-13T07:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.273335 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.273381 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.273395 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.273415 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.273432 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:36Z","lastTransitionTime":"2025-12-13T07:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.377020 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.377102 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.377127 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.377160 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.377184 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:36Z","lastTransitionTime":"2025-12-13T07:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.480197 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.480272 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.480285 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.480305 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.480320 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:36Z","lastTransitionTime":"2025-12-13T07:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.583708 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.583750 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.583760 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.583779 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.583790 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:36Z","lastTransitionTime":"2025-12-13T07:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.686349 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.686406 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.686421 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.686447 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.686467 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:36Z","lastTransitionTime":"2025-12-13T07:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.789294 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.789353 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.789367 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.789392 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.789407 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:36Z","lastTransitionTime":"2025-12-13T07:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.891521 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.891569 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.891587 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.891610 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.891624 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:36Z","lastTransitionTime":"2025-12-13T07:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.994775 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.994820 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.994831 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.994846 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:36 crc kubenswrapper[4733]: I1213 07:04:36.994856 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:36Z","lastTransitionTime":"2025-12-13T07:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.098161 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.098254 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.098271 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.098289 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.098301 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:37Z","lastTransitionTime":"2025-12-13T07:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.201626 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.201673 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.201683 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.201702 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.201714 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:37Z","lastTransitionTime":"2025-12-13T07:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.303784 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.303861 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.303878 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.303900 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.303915 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:37Z","lastTransitionTime":"2025-12-13T07:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.405894 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.405941 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.405949 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.405963 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.405973 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:37Z","lastTransitionTime":"2025-12-13T07:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.500571 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.500653 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.500679 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.500701 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:04:41.500677305 +0000 UTC m=+26.111907788 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.500767 4733 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.500773 4733 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.500811 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:41.500803058 +0000 UTC m=+26.112033541 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.500826 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:41.500819359 +0000 UTC m=+26.112049842 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.508279 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.508336 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.508350 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.508368 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.508377 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:37Z","lastTransitionTime":"2025-12-13T07:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.549688 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.582248 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.594042 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.594141 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.594155 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.594267 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.594285 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.594466 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.596573 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.601726 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.602003 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.602110 4733 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.602153 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.602167 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.602173 4733 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.602193 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.602246 4733 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.602278 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:41.602248549 +0000 UTC m=+26.213479062 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:37 crc kubenswrapper[4733]: E1213 07:04:37.602317 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:41.60229603 +0000 UTC m=+26.213526563 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.609563 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.609767 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.609782 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.609791 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.609803 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.609812 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:37Z","lastTransitionTime":"2025-12-13T07:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.625235 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.647726 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resou
rce-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"
2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.711896 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.711938 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.711950 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.711967 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.711980 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:37Z","lastTransitionTime":"2025-12-13T07:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.815055 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.815121 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.815136 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.815158 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.815176 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:37Z","lastTransitionTime":"2025-12-13T07:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.908118 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.913179 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.913434 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.915286 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.917694 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.917757 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.917770 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.917791 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.917802 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:37Z","lastTransitionTime":"2025-12-13T07:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.918396 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966"} Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.932672 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e3
3e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.946022 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.958017 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.969780 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:37 crc kubenswrapper[4733]: I1213 07:04:37.985668 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.001421 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.019369 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.020600 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.020661 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.020672 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.020694 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.020706 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:38Z","lastTransitionTime":"2025-12-13T07:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.032171 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.051903 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.067325 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: 
Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.080344 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.094569 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.113767 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.123983 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.124102 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.124130 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.124147 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.124162 4733 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:38Z","lastTransitionTime":"2025-12-13T07:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.130404 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.146166 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.162487 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.226939 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.227675 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.227695 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.227714 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.227725 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:38Z","lastTransitionTime":"2025-12-13T07:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.330646 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.330700 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.330712 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.330732 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.330743 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:38Z","lastTransitionTime":"2025-12-13T07:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.433890 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.433927 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.433937 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.433954 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.433965 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:38Z","lastTransitionTime":"2025-12-13T07:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.536191 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.536254 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.536267 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.536284 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.536294 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:38Z","lastTransitionTime":"2025-12-13T07:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.638242 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.638283 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.638292 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.638306 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.638317 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:38Z","lastTransitionTime":"2025-12-13T07:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.740187 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.740243 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.740252 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.740273 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.740282 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:38Z","lastTransitionTime":"2025-12-13T07:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.843135 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.843184 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.843199 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.843290 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.843358 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:38Z","lastTransitionTime":"2025-12-13T07:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.922020 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7"} Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.945274 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e3
3e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.945523 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.945556 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.945567 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.945584 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.945595 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:38Z","lastTransitionTime":"2025-12-13T07:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.960847 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.974546 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:38 crc kubenswrapper[4733]: I1213 07:04:38.987315 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.002143 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:38Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.015725 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.027784 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.041084 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.048090 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.048119 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.048129 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.048142 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.048151 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:39Z","lastTransitionTime":"2025-12-13T07:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.149854 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.149887 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.149895 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.149940 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.149949 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:39Z","lastTransitionTime":"2025-12-13T07:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.166313 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.169534 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.176817 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.186486 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.197851 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.209583 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.223044 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.236235 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.250384 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.251999 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.252030 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.252039 4733 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.252052 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.252061 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:39Z","lastTransitionTime":"2025-12-13T07:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.261688 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.274070 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.289180 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.301181 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.312547 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.325451 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.338588 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.352121 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.353933 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.353978 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.353989 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.354008 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.354020 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:39Z","lastTransitionTime":"2025-12-13T07:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.364502 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.382949 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.396336 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:39Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.456281 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.456355 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.456371 4733 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.456396 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.456407 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:39Z","lastTransitionTime":"2025-12-13T07:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.558662 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.558698 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.558709 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.558725 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.558736 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:39Z","lastTransitionTime":"2025-12-13T07:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.594004 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.594039 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.594097 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:39 crc kubenswrapper[4733]: E1213 07:04:39.594148 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:39 crc kubenswrapper[4733]: E1213 07:04:39.594205 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:39 crc kubenswrapper[4733]: E1213 07:04:39.594343 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.660867 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.660915 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.660927 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.660944 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.660956 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:39Z","lastTransitionTime":"2025-12-13T07:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.763287 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.763325 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.763334 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.763348 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.763358 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:39Z","lastTransitionTime":"2025-12-13T07:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.865012 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.865044 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.865053 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.865066 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.865077 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:39Z","lastTransitionTime":"2025-12-13T07:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:39 crc kubenswrapper[4733]: E1213 07:04:39.930497 4733 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.967300 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.967335 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.967348 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.967364 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:39 crc kubenswrapper[4733]: I1213 07:04:39.967375 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:39Z","lastTransitionTime":"2025-12-13T07:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.069929 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.069962 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.069974 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.069991 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.070004 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:40Z","lastTransitionTime":"2025-12-13T07:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.173112 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.173156 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.173165 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.173184 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.173194 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:40Z","lastTransitionTime":"2025-12-13T07:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.275178 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.275222 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.275232 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.275246 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.275254 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:40Z","lastTransitionTime":"2025-12-13T07:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.377419 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.377461 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.377473 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.377491 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.377504 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:40Z","lastTransitionTime":"2025-12-13T07:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.479892 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.479939 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.479950 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.479965 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.479974 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:40Z","lastTransitionTime":"2025-12-13T07:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.582932 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.582974 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.582984 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.582998 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.583007 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:40Z","lastTransitionTime":"2025-12-13T07:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.685617 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.685654 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.685664 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.685681 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.685692 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:40Z","lastTransitionTime":"2025-12-13T07:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.788327 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.788398 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.788413 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.788437 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.788450 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:40Z","lastTransitionTime":"2025-12-13T07:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.890831 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.890871 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.890883 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.890897 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.890907 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:40Z","lastTransitionTime":"2025-12-13T07:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.928313 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.943851 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:40Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.959770 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:40Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.983721 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"fi
nishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:40Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.993261 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.993322 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.993334 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.993348 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.993356 4733 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:40Z","lastTransitionTime":"2025-12-13T07:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:40 crc kubenswrapper[4733]: I1213 07:04:40.998206 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:40Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.011433 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.023656 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.038400 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.050150 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.070111 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.095633 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.095679 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.095718 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.095753 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.095766 4733 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:41Z","lastTransitionTime":"2025-12-13T07:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.197971 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.198010 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.198018 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.198033 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.198042 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:41Z","lastTransitionTime":"2025-12-13T07:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.304106 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.304159 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.304171 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.304190 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.304203 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:41Z","lastTransitionTime":"2025-12-13T07:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.319940 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-h7vxp"] Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.320338 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.323564 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.323883 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.324202 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-mn7vx"] Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.324609 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-v25b7"] Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.325243 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-mn7vx" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.325831 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.330482 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.330824 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.330965 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.335814 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-hostroot\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.335855 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-etc-kubernetes\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.335943 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-system-cni-dir\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336010 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-daemon-config\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336043 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-run-multus-certs\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " 
pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336082 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-run-k8s-cni-cncf-io\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336107 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-var-lib-kubelet\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336185 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-cni-dir\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336266 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-cnibin\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336300 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/abd0f65b-5959-4a7c-b89d-d22d56c6b6fc-hosts-file\") pod \"node-resolver-mn7vx\" (UID: \"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\") " pod="openshift-dns/node-resolver-mn7vx" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336319 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-os-release\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336344 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-socket-dir-parent\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336364 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqvlx\" (UniqueName: \"kubernetes.io/projected/abd0f65b-5959-4a7c-b89d-d22d56c6b6fc-kube-api-access-mqvlx\") pod \"node-resolver-mn7vx\" (UID: \"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\") " pod="openshift-dns/node-resolver-mn7vx" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336388 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9lfj\" (UniqueName: \"kubernetes.io/projected/8f3658c4-6596-4806-936b-1367da3bfb0e-kube-api-access-q9lfj\") pod 
\"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336420 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-cnibin\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336445 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-os-release\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336473 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3662e179-d702-4e13-a972-a5b6ab155e2c-cni-binary-copy\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336547 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jw9l4\" (UniqueName: \"kubernetes.io/projected/3662e179-d702-4e13-a972-a5b6ab155e2c-kube-api-access-jw9l4\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336581 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-system-cni-dir\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336645 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-run-netns\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336675 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336701 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/8f3658c4-6596-4806-936b-1367da3bfb0e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336802 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" 
(UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-var-lib-cni-multus\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336851 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8f3658c4-6596-4806-936b-1367da3bfb0e-cni-binary-copy\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336907 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-var-lib-cni-bin\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.336938 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-conf-dir\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.337065 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.337367 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.337477 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.337810 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.338976 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.353383 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.372804 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.393485 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.406557 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.406611 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.406624 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.406643 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.406943 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:41Z","lastTransitionTime":"2025-12-13T07:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.413359 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437593 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-run-k8s-cni-cncf-io\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437640 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-var-lib-kubelet\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437674 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-cni-dir\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437697 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-cnibin\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437719 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/abd0f65b-5959-4a7c-b89d-d22d56c6b6fc-hosts-file\") pod \"node-resolver-mn7vx\" (UID: \"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\") " pod="openshift-dns/node-resolver-mn7vx" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437742 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-os-release\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437763 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-cnibin\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437784 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-os-release\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437803 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3662e179-d702-4e13-a972-a5b6ab155e2c-cni-binary-copy\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437822 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-socket-dir-parent\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437845 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqvlx\" (UniqueName: \"kubernetes.io/projected/abd0f65b-5959-4a7c-b89d-d22d56c6b6fc-kube-api-access-mqvlx\") pod \"node-resolver-mn7vx\" (UID: \"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\") " pod="openshift-dns/node-resolver-mn7vx" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437865 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9lfj\" (UniqueName: \"kubernetes.io/projected/8f3658c4-6596-4806-936b-1367da3bfb0e-kube-api-access-q9lfj\") pod \"multus-additional-cni-plugins-v25b7\" (UID: 
\"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437887 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jw9l4\" (UniqueName: \"kubernetes.io/projected/3662e179-d702-4e13-a972-a5b6ab155e2c-kube-api-access-jw9l4\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437909 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-system-cni-dir\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437928 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-run-netns\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437947 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.437967 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/8f3658c4-6596-4806-936b-1367da3bfb0e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438015 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-var-lib-cni-multus\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438044 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-var-lib-cni-bin\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438064 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8f3658c4-6596-4806-936b-1367da3bfb0e-cni-binary-copy\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438085 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-conf-dir\") pod \"multus-h7vxp\" (UID: 
\"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438117 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-hostroot\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438138 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-etc-kubernetes\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438157 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-system-cni-dir\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438193 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-daemon-config\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438235 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-run-multus-certs\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438332 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-run-multus-certs\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438379 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-run-k8s-cni-cncf-io\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438411 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-var-lib-kubelet\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438607 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-cni-dir\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438654 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-system-cni-dir\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438687 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-cnibin\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438733 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/abd0f65b-5959-4a7c-b89d-d22d56c6b6fc-hosts-file\") pod \"node-resolver-mn7vx\" (UID: \"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\") " pod="openshift-dns/node-resolver-mn7vx" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438785 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-os-release\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438833 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-cnibin\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.438893 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-os-release\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.439104 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-etc-kubernetes\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.439185 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-conf-dir\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.439244 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-hostroot\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.439318 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-system-cni-dir\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.439450 4733 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-run-netns\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.439463 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-socket-dir-parent\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.439515 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"
}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef4
0bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.439604 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-var-lib-cni-multus\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.439706 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3662e179-d702-4e13-a972-a5b6ab155e2c-host-var-lib-cni-bin\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.439785 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8f3658c4-6596-4806-936b-1367da3bfb0e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: 
I1213 07:04:41.440736 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3662e179-d702-4e13-a972-a5b6ab155e2c-multus-daemon-config\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.440793 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3662e179-d702-4e13-a972-a5b6ab155e2c-cni-binary-copy\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.440971 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/8f3658c4-6596-4806-936b-1367da3bfb0e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.440990 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8f3658c4-6596-4806-936b-1367da3bfb0e-cni-binary-copy\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.462798 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.465060 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqvlx\" (UniqueName: \"kubernetes.io/projected/abd0f65b-5959-4a7c-b89d-d22d56c6b6fc-kube-api-access-mqvlx\") pod \"node-resolver-mn7vx\" (UID: \"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\") " pod="openshift-dns/node-resolver-mn7vx" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.466047 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9lfj\" (UniqueName: \"kubernetes.io/projected/8f3658c4-6596-4806-936b-1367da3bfb0e-kube-api-access-q9lfj\") pod \"multus-additional-cni-plugins-v25b7\" (UID: \"8f3658c4-6596-4806-936b-1367da3bfb0e\") " pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 
07:04:41.468351 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jw9l4\" (UniqueName: \"kubernetes.io/projected/3662e179-d702-4e13-a972-a5b6ab155e2c-kube-api-access-jw9l4\") pod \"multus-h7vxp\" (UID: \"3662e179-d702-4e13-a972-a5b6ab155e2c\") " pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.475789 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.488298 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.500612 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.510148 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.510226 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.510238 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.510256 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.510268 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:41Z","lastTransitionTime":"2025-12-13T07:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.514371 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.528193 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc1
8fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.539184 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.539337 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.539382 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:04:49.539347961 +0000 UTC m=+34.150578444 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.539450 4733 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.539501 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.539517 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:49.539500605 +0000 UTC m=+34.150731108 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.539636 4733 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.539683 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:49.539675121 +0000 UTC m=+34.150905674 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.543962 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.555609 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.568192 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.577646 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is 
after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.592697 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.593875 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.593939 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.593982 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.594096 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.593947 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.594274 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.604648 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.612320 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.612377 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.612386 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.612399 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.612409 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:41Z","lastTransitionTime":"2025-12-13T07:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.616178 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.631354 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-h7vxp" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.631428 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.638184 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-mn7vx" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.640646 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.640684 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.640807 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.640832 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.640845 4733 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.640888 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:49.640872894 +0000 UTC m=+34.252103377 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.641275 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.641296 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.641306 4733 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:41 crc kubenswrapper[4733]: E1213 07:04:41.641378 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 07:04:49.641326427 +0000 UTC m=+34.252556910 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.643827 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-v25b7" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.656204 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873ac
a4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: W1213 07:04:41.657864 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podabd0f65b_5959_4a7c_b89d_d22d56c6b6fc.slice/crio-0295484ea0f25c172bea1eb0e564d2ca3bbc9804c8387480e75f7ab904c0491a WatchSource:0}: Error finding container 0295484ea0f25c172bea1eb0e564d2ca3bbc9804c8387480e75f7ab904c0491a: Status 404 returned error can't find the container with id 0295484ea0f25c172bea1eb0e564d2ca3bbc9804c8387480e75f7ab904c0491a Dec 13 07:04:41 crc kubenswrapper[4733]: W1213 07:04:41.659253 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f3658c4_6596_4806_936b_1367da3bfb0e.slice/crio-6cf5fdc8549040e7c7a2d43dd8ca3086a50d7d01610906b325eb2d0b2c3a01b5 WatchSource:0}: Error finding container 6cf5fdc8549040e7c7a2d43dd8ca3086a50d7d01610906b325eb2d0b2c3a01b5: Status 404 returned error can't find the container with id 6cf5fdc8549040e7c7a2d43dd8ca3086a50d7d01610906b325eb2d0b2c3a01b5 Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.671374 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.683422 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.701145 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-d4w5t"] Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.701655 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.702537 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vsc8k"] Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.703518 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.705082 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.705353 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.705506 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.705524 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.705605 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.705685 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.705699 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.705839 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.705932 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.706028 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.706139 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.706293 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.714813 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.714844 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.714855 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.714869 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.714880 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:41Z","lastTransitionTime":"2025-12-13T07:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.717383 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.729833 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.741425 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742090 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-systemd-units\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742150 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-run-ovn-kubernetes\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742186 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsrwv\" (UniqueName: \"kubernetes.io/projected/b6fa9235-730d-46d0-9211-563c96e073fb-kube-api-access-jsrwv\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742230 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fa175c9e-efff-4835-8b54-c98307e46f0b-proxy-tls\") pod \"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742253 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-node-log\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742274 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-run-ovn\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742306 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" 
(UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-kubelet\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742321 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-log-socket\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742342 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-cni-netd\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742381 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-run-systemd\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742406 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-slash\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742428 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-var-lib-openvswitch\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742448 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-cni-bin\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742471 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-run-netns\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742492 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b6fa9235-730d-46d0-9211-563c96e073fb-ovnkube-script-lib\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742514 4733 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-etc-openvswitch\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742532 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b6fa9235-730d-46d0-9211-563c96e073fb-ovnkube-config\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742554 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b6fa9235-730d-46d0-9211-563c96e073fb-ovn-node-metrics-cert\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742587 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-run-openvswitch\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742608 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fa175c9e-efff-4835-8b54-c98307e46f0b-rootfs\") pod \"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742640 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742667 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b6fa9235-730d-46d0-9211-563c96e073fb-env-overrides\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742692 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fa175c9e-efff-4835-8b54-c98307e46f0b-mcd-auth-proxy-config\") pod \"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.742713 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgfbq\" (UniqueName: \"kubernetes.io/projected/fa175c9e-efff-4835-8b54-c98307e46f0b-kube-api-access-cgfbq\") pod 
\"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.750868 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.761761 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.773367 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.784548 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.795511 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.806612 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.816560 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.816690 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.816704 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.816722 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.816733 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:41Z","lastTransitionTime":"2025-12-13T07:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.817910 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.832611 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843325 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843443 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-kubelet\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843474 4733 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-log-socket\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843495 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-cni-netd\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843500 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-kubelet\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843528 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-run-systemd\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843539 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-log-socket\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843549 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-slash\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843562 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-cni-netd\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843759 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-run-netns\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843774 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-run-systemd\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843778 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-var-lib-openvswitch\") 
pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843796 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-cni-bin\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843815 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-run-netns\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843819 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b6fa9235-730d-46d0-9211-563c96e073fb-ovnkube-script-lib\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843836 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-cni-bin\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843849 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-etc-openvswitch\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843855 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-var-lib-openvswitch\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843885 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b6fa9235-730d-46d0-9211-563c96e073fb-ovnkube-config\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843903 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b6fa9235-730d-46d0-9211-563c96e073fb-ovn-node-metrics-cert\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843921 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-run-openvswitch\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843943 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fa175c9e-efff-4835-8b54-c98307e46f0b-rootfs\") pod \"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843963 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844014 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b6fa9235-730d-46d0-9211-563c96e073fb-env-overrides\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844039 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fa175c9e-efff-4835-8b54-c98307e46f0b-mcd-auth-proxy-config\") pod \"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844060 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgfbq\" (UniqueName: \"kubernetes.io/projected/fa175c9e-efff-4835-8b54-c98307e46f0b-kube-api-access-cgfbq\") pod \"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844080 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-systemd-units\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844101 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-run-ovn-kubernetes\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844122 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsrwv\" (UniqueName: \"kubernetes.io/projected/b6fa9235-730d-46d0-9211-563c96e073fb-kube-api-access-jsrwv\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844141 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fa175c9e-efff-4835-8b54-c98307e46f0b-proxy-tls\") pod 
\"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844160 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-node-log\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844190 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-run-ovn\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844257 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-run-ovn\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844343 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-run-ovn-kubernetes\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844453 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fa175c9e-efff-4835-8b54-c98307e46f0b-rootfs\") pod \"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844459 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-systemd-units\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.843799 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-slash\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844492 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-run-openvswitch\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844524 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844545 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-etc-openvswitch\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844585 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b6fa9235-730d-46d0-9211-563c96e073fb-node-log\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.844792 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b6fa9235-730d-46d0-9211-563c96e073fb-ovnkube-script-lib\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.845289 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fa175c9e-efff-4835-8b54-c98307e46f0b-mcd-auth-proxy-config\") pod \"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.846549 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b6fa9235-730d-46d0-9211-563c96e073fb-ovnkube-config\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.846839 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b6fa9235-730d-46d0-9211-563c96e073fb-env-overrides\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.848563 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b6fa9235-730d-46d0-9211-563c96e073fb-ovn-node-metrics-cert\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.850469 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fa175c9e-efff-4835-8b54-c98307e46f0b-proxy-tls\") pod \"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.858065 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgfbq\" (UniqueName: \"kubernetes.io/projected/fa175c9e-efff-4835-8b54-c98307e46f0b-kube-api-access-cgfbq\") pod \"machine-config-daemon-d4w5t\" (UID: \"fa175c9e-efff-4835-8b54-c98307e46f0b\") " pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:41 
crc kubenswrapper[4733]: I1213 07:04:41.858421 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsrwv\" (UniqueName: \"kubernetes.io/projected/b6fa9235-730d-46d0-9211-563c96e073fb-kube-api-access-jsrwv\") pod \"ovnkube-node-vsc8k\" (UID: \"b6fa9235-730d-46d0-9211-563c96e073fb\") " pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.863913 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e
33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.884563 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.897194 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.907421 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.919164 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.919200 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.919224 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.919250 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.919260 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:41Z","lastTransitionTime":"2025-12-13T07:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.924716 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.932451 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" event={"ID":"8f3658c4-6596-4806-936b-1367da3bfb0e","Type":"ContainerStarted","Data":"6cf5fdc8549040e7c7a2d43dd8ca3086a50d7d01610906b325eb2d0b2c3a01b5"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.933875 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h7vxp" event={"ID":"3662e179-d702-4e13-a972-a5b6ab155e2c","Type":"ContainerStarted","Data":"f6c87a0d517cecf994ea41a88b2cbd5d3e77bffaa226affbf4ae804e6bb909ba"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.934683 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mn7vx" event={"ID":"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc","Type":"ContainerStarted","Data":"0295484ea0f25c172bea1eb0e564d2ca3bbc9804c8387480e75f7ab904c0491a"} Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.936806 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.948814 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.964396 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.975540 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.987428 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:41 crc kubenswrapper[4733]: I1213 07:04:41.996790 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:41Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.009853 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:42Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.017986 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.021407 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.021475 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.021491 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.021506 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.021522 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:42Z","lastTransitionTime":"2025-12-13T07:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.021832 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:42Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.023699 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:42 crc kubenswrapper[4733]: W1213 07:04:42.029370 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa175c9e_efff_4835_8b54_c98307e46f0b.slice/crio-d57da3b9f124182de03d3aacb61577a46150671813d6f91b1c83b3b899f46ccf WatchSource:0}: Error finding container d57da3b9f124182de03d3aacb61577a46150671813d6f91b1c83b3b899f46ccf: Status 404 returned error can't find the container with id d57da3b9f124182de03d3aacb61577a46150671813d6f91b1c83b3b899f46ccf Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.034123 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:42Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:42 crc kubenswrapper[4733]: W1213 07:04:42.039839 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6fa9235_730d_46d0_9211_563c96e073fb.slice/crio-65b579b16dcfd4f739815fc9a9652d6396aa8403ada6a0a2885165874b6dbe14 WatchSource:0}: Error finding container 65b579b16dcfd4f739815fc9a9652d6396aa8403ada6a0a2885165874b6dbe14: Status 404 returned error can't find the container with id 65b579b16dcfd4f739815fc9a9652d6396aa8403ada6a0a2885165874b6dbe14 Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.045934 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:42Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.123864 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.123901 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.123913 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.123930 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.123941 4733 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:42Z","lastTransitionTime":"2025-12-13T07:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.226564 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.226617 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.226628 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.226647 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.226658 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:42Z","lastTransitionTime":"2025-12-13T07:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.329532 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.329588 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.329601 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.329620 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.329630 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:42Z","lastTransitionTime":"2025-12-13T07:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.432289 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.432349 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.432361 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.432383 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.432399 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:42Z","lastTransitionTime":"2025-12-13T07:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.534529 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.534580 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.534592 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.534611 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.534624 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:42Z","lastTransitionTime":"2025-12-13T07:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.637118 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.637154 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.637162 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.637176 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.637186 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:42Z","lastTransitionTime":"2025-12-13T07:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.739730 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.739777 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.739792 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.739809 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.739820 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:42Z","lastTransitionTime":"2025-12-13T07:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.841889 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.841934 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.841946 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.841962 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.841973 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:42Z","lastTransitionTime":"2025-12-13T07:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.938517 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"65b579b16dcfd4f739815fc9a9652d6396aa8403ada6a0a2885165874b6dbe14"} Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.939785 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" event={"ID":"fa175c9e-efff-4835-8b54-c98307e46f0b","Type":"ContainerStarted","Data":"d57da3b9f124182de03d3aacb61577a46150671813d6f91b1c83b3b899f46ccf"} Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.944404 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.944452 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.944468 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.944486 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:42 crc kubenswrapper[4733]: I1213 07:04:42.944499 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:42Z","lastTransitionTime":"2025-12-13T07:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.047521 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.047603 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.047629 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.047670 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.047694 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.149970 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.150004 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.150016 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.150035 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.150047 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.252100 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.252138 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.252149 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.252167 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.252180 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.354317 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.354363 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.354375 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.354391 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.354403 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.456247 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.456282 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.456290 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.456304 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.456314 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.558553 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.558596 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.558607 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.558624 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.558638 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.594094 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.594672 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:43 crc kubenswrapper[4733]: E1213 07:04:43.594864 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.595308 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:43 crc kubenswrapper[4733]: E1213 07:04:43.595411 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:43 crc kubenswrapper[4733]: E1213 07:04:43.595525 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.661467 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.661513 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.661523 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.661538 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.661546 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.764124 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.764169 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.764180 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.764197 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.764227 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.866717 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.866748 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.866758 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.866773 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.866783 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.930665 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.930713 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.930728 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.930747 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.930764 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.944495 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" event={"ID":"fa175c9e-efff-4835-8b54-c98307e46f0b","Type":"ContainerStarted","Data":"3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.944541 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" event={"ID":"fa175c9e-efff-4835-8b54-c98307e46f0b","Type":"ContainerStarted","Data":"e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.946438 4733 generic.go:334] "Generic (PLEG): container finished" podID="8f3658c4-6596-4806-936b-1367da3bfb0e" containerID="0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5" exitCode=0 Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.946492 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" event={"ID":"8f3658c4-6596-4806-936b-1367da3bfb0e","Type":"ContainerDied","Data":"0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.949069 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h7vxp" event={"ID":"3662e179-d702-4e13-a972-a5b6ab155e2c","Type":"ContainerStarted","Data":"4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.950491 4733 generic.go:334] "Generic (PLEG): container finished" podID="b6fa9235-730d-46d0-9211-563c96e073fb" containerID="0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5" exitCode=0 Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.950571 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerDied","Data":"0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5"} Dec 13 07:04:43 crc kubenswrapper[4733]: E1213 07:04:43.951741 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-13T07:04:43Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.951938 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mn7vx" event={"ID":"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc","Type":"ContainerStarted","Data":"e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b"} Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.961541 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshif
t-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:43Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.962448 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.962487 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.962497 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.962514 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.962525 4733 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: E1213 07:04:43.977229 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:43Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.978370 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:43Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.982461 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.982502 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.982512 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.982527 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.982537 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:43 crc kubenswrapper[4733]: E1213 07:04:43.994765 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:43Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.998751 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.998801 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.998813 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.998830 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:43 crc kubenswrapper[4733]: I1213 07:04:43.998841 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:43Z","lastTransitionTime":"2025-12-13T07:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.000564 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:43Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: E1213 07:04:44.012124 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.015571 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.015607 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.015619 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.015637 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.015649 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:44Z","lastTransitionTime":"2025-12-13T07:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.023879 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: E1213 07:04:44.026787 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: E1213 07:04:44.026891 4733 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.028736 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.028761 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.028769 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.028783 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.028793 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:44Z","lastTransitionTime":"2025-12-13T07:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.036069 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.049137 4733 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.062706 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.073907 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.086827 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{
\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.100715 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1
74f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.117314 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.131412 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.131458 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.131469 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.131485 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.131499 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:44Z","lastTransitionTime":"2025-12-13T07:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.131999 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.148869 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.168751 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.185278 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.197677 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.211637 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.227581 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.234105 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.234289 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.234371 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.234479 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.234577 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:44Z","lastTransitionTime":"2025-12-13T07:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.243258 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.254528 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.272923 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.285770 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-4pmlw"] Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.286077 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\
":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.286195 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-4pmlw" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.287924 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.287977 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.288008 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.288167 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.300930 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.312197 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.336509 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.336540 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.336549 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.336562 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.336573 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:44Z","lastTransitionTime":"2025-12-13T07:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.338384 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.351691 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.365145 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.367650 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr26g\" (UniqueName: \"kubernetes.io/projected/e2e3d2e4-4723-4ec9-bf01-ac029216fa91-kube-api-access-qr26g\") pod \"node-ca-4pmlw\" (UID: \"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\") " pod="openshift-image-registry/node-ca-4pmlw" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.367745 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e2e3d2e4-4723-4ec9-bf01-ac029216fa91-host\") pod \"node-ca-4pmlw\" (UID: \"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\") " pod="openshift-image-registry/node-ca-4pmlw" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.367780 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e2e3d2e4-4723-4ec9-bf01-ac029216fa91-serviceca\") pod \"node-ca-4pmlw\" (UID: \"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\") " pod="openshift-image-registry/node-ca-4pmlw" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.383390 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.395004 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k
8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.409541 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc 
kubenswrapper[4733]: I1213 07:04:44.419628 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.431894 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.438002 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.438034 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.438043 4733 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.438059 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.438068 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:44Z","lastTransitionTime":"2025-12-13T07:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.443313 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.460750 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.468857 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e2e3d2e4-4723-4ec9-bf01-ac029216fa91-host\") pod \"node-ca-4pmlw\" (UID: \"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\") " pod="openshift-image-registry/node-ca-4pmlw" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.468910 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e2e3d2e4-4723-4ec9-bf01-ac029216fa91-serviceca\") pod \"node-ca-4pmlw\" (UID: \"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\") " pod="openshift-image-registry/node-ca-4pmlw" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.468939 4733 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr26g\" (UniqueName: \"kubernetes.io/projected/e2e3d2e4-4723-4ec9-bf01-ac029216fa91-kube-api-access-qr26g\") pod \"node-ca-4pmlw\" (UID: \"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\") " pod="openshift-image-registry/node-ca-4pmlw" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.468984 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e2e3d2e4-4723-4ec9-bf01-ac029216fa91-host\") pod \"node-ca-4pmlw\" (UID: \"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\") " pod="openshift-image-registry/node-ca-4pmlw" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.469856 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e2e3d2e4-4723-4ec9-bf01-ac029216fa91-serviceca\") pod \"node-ca-4pmlw\" (UID: \"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\") " pod="openshift-image-registry/node-ca-4pmlw" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.475524 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.487725 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.488056 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qr26g\" (UniqueName: \"kubernetes.io/projected/e2e3d2e4-4723-4ec9-bf01-ac029216fa91-kube-api-access-qr26g\") pod \"node-ca-4pmlw\" (UID: \"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\") " pod="openshift-image-registry/node-ca-4pmlw" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.507852 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.520455 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.534908 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.540560 4733 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.540591 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.540600 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.540615 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.540624 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:44Z","lastTransitionTime":"2025-12-13T07:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.549299 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.561386 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.572853 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.589077 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.596533 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-4pmlw" Dec 13 07:04:44 crc kubenswrapper[4733]: W1213 07:04:44.606401 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2e3d2e4_4723_4ec9_bf01_ac029216fa91.slice/crio-ab851e4bf655cdb737c970c28802124a23e7fcc58804d5007a7f71d1e445b300 WatchSource:0}: Error finding container ab851e4bf655cdb737c970c28802124a23e7fcc58804d5007a7f71d1e445b300: Status 404 returned error can't find the container with id ab851e4bf655cdb737c970c28802124a23e7fcc58804d5007a7f71d1e445b300 Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.642691 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.642734 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.642745 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.642762 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.642774 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:44Z","lastTransitionTime":"2025-12-13T07:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.745353 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.745733 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.745748 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.745768 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.745782 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:44Z","lastTransitionTime":"2025-12-13T07:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.848426 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.848472 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.848490 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.848512 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.848528 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:44Z","lastTransitionTime":"2025-12-13T07:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.950988 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.951017 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.951027 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.951040 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.951048 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:44Z","lastTransitionTime":"2025-12-13T07:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.961621 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.961661 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.961671 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.961679 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.961688 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.961696 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.963391 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-4pmlw" event={"ID":"e2e3d2e4-4723-4ec9-bf01-ac029216fa91","Type":"ContainerStarted","Data":"5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.963439 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-4pmlw" event={"ID":"e2e3d2e4-4723-4ec9-bf01-ac029216fa91","Type":"ContainerStarted","Data":"ab851e4bf655cdb737c970c28802124a23e7fcc58804d5007a7f71d1e445b300"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.966722 4733 generic.go:334] "Generic (PLEG): container finished" podID="8f3658c4-6596-4806-936b-1367da3bfb0e" containerID="8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82" exitCode=0 Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.966800 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" event={"ID":"8f3658c4-6596-4806-936b-1367da3bfb0e","Type":"ContainerDied","Data":"8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82"} Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.981444 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:44 crc kubenswrapper[4733]: I1213 07:04:44.993484 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:44Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.017091 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z 
is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.029462 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.039673 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.052009 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.054779 4733 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.054829 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.054845 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.054867 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.054883 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:45Z","lastTransitionTime":"2025-12-13T07:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.064427 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.078065 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.089027 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.103377 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.115985 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.131318 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc 
kubenswrapper[4733]: I1213 07:04:45.141593 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.157648 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.157722 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.157740 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.157762 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.157775 4733 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:45Z","lastTransitionTime":"2025-12-13T07:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.158506 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.179373 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\
\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644
c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.199468 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.213330 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.224498 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.239383 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z 
is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.250722 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.259838 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.260184 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.260239 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.260254 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.260271 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.260282 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:45Z","lastTransitionTime":"2025-12-13T07:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.269959 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.281608 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.293491 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.304487 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.314923 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.326165 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.339412 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.348651 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.361274 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.362493 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.362525 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.362536 4733 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.362550 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.362559 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:45Z","lastTransitionTime":"2025-12-13T07:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.464944 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.464987 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.464996 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.465011 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.465021 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:45Z","lastTransitionTime":"2025-12-13T07:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.567775 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.567820 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.567834 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.567852 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.567865 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:45Z","lastTransitionTime":"2025-12-13T07:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.594197 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.594275 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:45 crc kubenswrapper[4733]: E1213 07:04:45.594357 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.594275 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:45 crc kubenswrapper[4733]: E1213 07:04:45.594444 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:45 crc kubenswrapper[4733]: E1213 07:04:45.594490 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.617546 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.626153 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.637978 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.648604 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.660262 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.669730 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.669771 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.669783 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.669797 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.669809 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:45Z","lastTransitionTime":"2025-12-13T07:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.672053 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.684141 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.696909 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.709734 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.719365 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.730659 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.748288 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.759587 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.771682 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.771725 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.771734 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.771751 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.771767 4733 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:45Z","lastTransitionTime":"2025-12-13T07:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.772138 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.789000 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.873684 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.873732 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.873741 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.873757 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.873767 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:45Z","lastTransitionTime":"2025-12-13T07:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.972014 4733 generic.go:334] "Generic (PLEG): container finished" podID="8f3658c4-6596-4806-936b-1367da3bfb0e" containerID="23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0" exitCode=0 Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.972067 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" event={"ID":"8f3658c4-6596-4806-936b-1367da3bfb0e","Type":"ContainerDied","Data":"23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0"} Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.977001 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.977023 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.977032 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.977045 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.977054 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:45Z","lastTransitionTime":"2025-12-13T07:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.984247 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:45 crc kubenswrapper[4733]: I1213 07:04:45.996517 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:45Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.009955 4733 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c
39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.025309 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.037392 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.048855 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.061201 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.078325 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary
-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.078823 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.078851 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.078860 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.078873 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.078883 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:46Z","lastTransitionTime":"2025-12-13T07:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.090347 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.106564 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.122682 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.146731 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.163758 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.179413 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.181771 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.181820 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.181831 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.181968 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.182016 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:46Z","lastTransitionTime":"2025-12-13T07:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.201519 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3
dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.285114 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.285171 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.285182 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.285201 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.285237 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:46Z","lastTransitionTime":"2025-12-13T07:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.387914 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.387965 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.387978 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.387998 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.388013 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:46Z","lastTransitionTime":"2025-12-13T07:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.491820 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.491879 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.491891 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.491913 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.491926 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:46Z","lastTransitionTime":"2025-12-13T07:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.594918 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.594959 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.594968 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.594984 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.594995 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:46Z","lastTransitionTime":"2025-12-13T07:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.697537 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.697589 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.697599 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.697615 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.697626 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:46Z","lastTransitionTime":"2025-12-13T07:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.800777 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.800829 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.800839 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.800856 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.800866 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:46Z","lastTransitionTime":"2025-12-13T07:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.902824 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.902867 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.902878 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.902894 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.902906 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:46Z","lastTransitionTime":"2025-12-13T07:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.983899 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" event={"ID":"8f3658c4-6596-4806-936b-1367da3bfb0e","Type":"ContainerStarted","Data":"75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9"} Dec 13 07:04:46 crc kubenswrapper[4733]: I1213 07:04:46.998608 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:46Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.005759 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.005804 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.005816 4733 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.005835 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.005847 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:47Z","lastTransitionTime":"2025-12-13T07:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.016780 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.030357 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.044627 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.057854 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.076919 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"qu
ay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c687744
1ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.089965 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.101427 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.107431 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.107461 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.107471 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.107484 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.107492 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:47Z","lastTransitionTime":"2025-12-13T07:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.120390 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3
dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.133114 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.146152 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.160762 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.173049 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.183753 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.193687 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:47Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.209035 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.209059 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.209068 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.209081 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.209091 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:47Z","lastTransitionTime":"2025-12-13T07:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.310979 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.311290 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.311361 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.311445 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.311508 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:47Z","lastTransitionTime":"2025-12-13T07:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.414403 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.414673 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.414826 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.415123 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.415363 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:47Z","lastTransitionTime":"2025-12-13T07:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.518105 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.518135 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.518144 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.518176 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.518187 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:47Z","lastTransitionTime":"2025-12-13T07:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.594366 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.594393 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.594463 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:47 crc kubenswrapper[4733]: E1213 07:04:47.594522 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:47 crc kubenswrapper[4733]: E1213 07:04:47.594676 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:47 crc kubenswrapper[4733]: E1213 07:04:47.594745 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.620603 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.620644 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.620652 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.620665 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.620674 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:47Z","lastTransitionTime":"2025-12-13T07:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.722670 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.722702 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.722711 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.722727 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.722737 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:47Z","lastTransitionTime":"2025-12-13T07:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.830475 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.830528 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.830558 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.830575 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.830598 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:47Z","lastTransitionTime":"2025-12-13T07:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.932837 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.932877 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.932889 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.932906 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.932918 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:47Z","lastTransitionTime":"2025-12-13T07:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.989454 4733 generic.go:334] "Generic (PLEG): container finished" podID="8f3658c4-6596-4806-936b-1367da3bfb0e" containerID="75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9" exitCode=0 Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.989490 4733 generic.go:334] "Generic (PLEG): container finished" podID="8f3658c4-6596-4806-936b-1367da3bfb0e" containerID="f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469" exitCode=0 Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.989540 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" event={"ID":"8f3658c4-6596-4806-936b-1367da3bfb0e","Type":"ContainerDied","Data":"75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.989565 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" event={"ID":"8f3658c4-6596-4806-936b-1367da3bfb0e","Type":"ContainerDied","Data":"f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469"} Dec 13 07:04:47 crc kubenswrapper[4733]: I1213 07:04:47.993440 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64"} Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.006186 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:17
4f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.026138 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.035583 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.035631 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.035643 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.035667 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.035678 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:48Z","lastTransitionTime":"2025-12-13T07:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.037706 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.053518 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.067292 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.085639 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.105441 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z 
is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.118052 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.128630 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.139194 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.139251 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.139266 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.139285 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.139295 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:48Z","lastTransitionTime":"2025-12-13T07:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.140093 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.157154 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.169554 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.185272 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\"
:\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.201403 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-o
perator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.214090 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:48Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.242325 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.242401 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.242428 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.242459 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.242483 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:48Z","lastTransitionTime":"2025-12-13T07:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.344770 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.344866 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.344906 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.344938 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.344960 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:48Z","lastTransitionTime":"2025-12-13T07:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.446916 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.446974 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.446987 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.447005 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.447016 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:48Z","lastTransitionTime":"2025-12-13T07:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.549661 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.549700 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.549709 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.549727 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.549736 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:48Z","lastTransitionTime":"2025-12-13T07:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.652949 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.653018 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.653028 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.653047 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.653065 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:48Z","lastTransitionTime":"2025-12-13T07:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.755528 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.755576 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.755589 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.755609 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.755621 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:48Z","lastTransitionTime":"2025-12-13T07:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.858673 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.858716 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.858725 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.858743 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.858754 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:48Z","lastTransitionTime":"2025-12-13T07:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.961231 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.961269 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.961280 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.961294 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.961304 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:48Z","lastTransitionTime":"2025-12-13T07:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.999164 4733 generic.go:334] "Generic (PLEG): container finished" podID="8f3658c4-6596-4806-936b-1367da3bfb0e" containerID="fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc" exitCode=0 Dec 13 07:04:48 crc kubenswrapper[4733]: I1213 07:04:48.999229 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" event={"ID":"8f3658c4-6596-4806-936b-1367da3bfb0e","Type":"ContainerDied","Data":"fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc"} Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.013082 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.029120 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.044071 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.053883 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.063566 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.063600 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.063608 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.063621 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.063630 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:49Z","lastTransitionTime":"2025-12-13T07:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.069071 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.090380 4733 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:
04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.107064 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.118880 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.138049 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z 
is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.154480 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.165970 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.166013 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.166026 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.166041 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.166052 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:49Z","lastTransitionTime":"2025-12-13T07:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.166473 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.179367 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.193191 4733 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c
39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.206940 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.220277 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:49Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.268834 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.268905 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.268921 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.268952 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.268966 4733 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:49Z","lastTransitionTime":"2025-12-13T07:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.372321 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.372361 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.372373 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.372394 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.372408 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:49Z","lastTransitionTime":"2025-12-13T07:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.474625 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.475249 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.475298 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.475326 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.475343 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:49Z","lastTransitionTime":"2025-12-13T07:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.578625 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.578678 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.578691 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.578712 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.578726 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:49Z","lastTransitionTime":"2025-12-13T07:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.594412 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.594448 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.594501 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.594677 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.594802 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.594920 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.618370 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.618527 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.618570 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:05:05.618536603 +0000 UTC m=+50.229767086 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.618624 4733 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.618692 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:05:05.618674526 +0000 UTC m=+50.229905010 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.618738 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.618897 4733 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.618965 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:05:05.618947944 +0000 UTC m=+50.230178427 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.681633 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.681706 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.681720 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.681740 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.681752 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:49Z","lastTransitionTime":"2025-12-13T07:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.719361 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.719426 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.719582 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.719597 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.719608 4733 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.719661 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 07:05:05.719648417 +0000 UTC m=+50.330878900 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.719732 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.719800 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.719826 4733 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:49 crc kubenswrapper[4733]: E1213 07:04:49.719960 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 07:05:05.719916974 +0000 UTC m=+50.331147587 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.784040 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.784087 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.784099 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.784116 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.784129 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:49Z","lastTransitionTime":"2025-12-13T07:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.886694 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.886738 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.886751 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.886768 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.886780 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:49Z","lastTransitionTime":"2025-12-13T07:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.989143 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.989179 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.989191 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.989230 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:49 crc kubenswrapper[4733]: I1213 07:04:49.989248 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:49Z","lastTransitionTime":"2025-12-13T07:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.005823 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc"} Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.006184 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.006228 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.006241 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.012452 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" event={"ID":"8f3658c4-6596-4806-936b-1367da3bfb0e","Type":"ContainerStarted","Data":"1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013"} Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.020531 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.036646 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.049490 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.061256 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.071616 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.071732 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.072429 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.084839 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.090948 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.090977 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.090988 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.091005 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.091019 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:50Z","lastTransitionTime":"2025-12-13T07:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.098405 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.112628 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.125532 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.137624 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.152203 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793
e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.171708 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40
bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.182992 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.193033 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.193521 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.193560 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.193569 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.193584 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.193593 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:50Z","lastTransitionTime":"2025-12-13T07:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.211655 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"i
mageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":
\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.223841 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.235523 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.245814 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.256293 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.265631 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.276453 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\"
:\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.289591 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.295815 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.295853 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.295865 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.295881 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.295892 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:50Z","lastTransitionTime":"2025-12-13T07:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.300938 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.313306 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.326022 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.338555 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"
podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.359239 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b
90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.387151 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.398567 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.398612 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.398623 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.398640 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.398653 4733 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:50Z","lastTransitionTime":"2025-12-13T07:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.406035 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.434010 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:50Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.500944 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.500975 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.500983 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.500995 4733 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.501005 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:50Z","lastTransitionTime":"2025-12-13T07:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.602625 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.602682 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.602698 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.602718 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.602733 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:50Z","lastTransitionTime":"2025-12-13T07:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.704511 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.704555 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.704566 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.704583 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.704592 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:50Z","lastTransitionTime":"2025-12-13T07:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.807295 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.807350 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.807362 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.807380 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.807394 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:50Z","lastTransitionTime":"2025-12-13T07:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.910054 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.910093 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.910104 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.910118 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:50 crc kubenswrapper[4733]: I1213 07:04:50.910129 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:50Z","lastTransitionTime":"2025-12-13T07:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.012349 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.012377 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.012386 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.012399 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.012407 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:51Z","lastTransitionTime":"2025-12-13T07:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.114717 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.114753 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.114763 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.114776 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.114785 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:51Z","lastTransitionTime":"2025-12-13T07:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.217245 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.217288 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.217301 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.217317 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.217329 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:51Z","lastTransitionTime":"2025-12-13T07:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.319406 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.319435 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.319443 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.319455 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.319464 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:51Z","lastTransitionTime":"2025-12-13T07:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.421165 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.421219 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.421229 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.421245 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.421277 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:51Z","lastTransitionTime":"2025-12-13T07:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.523233 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.523285 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.523298 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.523316 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.523327 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:51Z","lastTransitionTime":"2025-12-13T07:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.593794 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.593871 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:51 crc kubenswrapper[4733]: E1213 07:04:51.593977 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.594057 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:51 crc kubenswrapper[4733]: E1213 07:04:51.594245 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:51 crc kubenswrapper[4733]: E1213 07:04:51.594337 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.625888 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.625930 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.625940 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.625956 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.625967 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:51Z","lastTransitionTime":"2025-12-13T07:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.728617 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.728643 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.728651 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.728664 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.728672 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:51Z","lastTransitionTime":"2025-12-13T07:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.832017 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.832048 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.832058 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.832073 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.832082 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:51Z","lastTransitionTime":"2025-12-13T07:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.934153 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.934197 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.934224 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.934241 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:51 crc kubenswrapper[4733]: I1213 07:04:51.934251 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:51Z","lastTransitionTime":"2025-12-13T07:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.036926 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.036970 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.036984 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.037000 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.037013 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:52Z","lastTransitionTime":"2025-12-13T07:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.147987 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.148020 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.148035 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.148054 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.148066 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:52Z","lastTransitionTime":"2025-12-13T07:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.250819 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.250867 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.250878 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.250895 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.250906 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:52Z","lastTransitionTime":"2025-12-13T07:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.352814 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.352847 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.352860 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.352877 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.352890 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:52Z","lastTransitionTime":"2025-12-13T07:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.455540 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.455579 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.455589 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.455605 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.455615 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:52Z","lastTransitionTime":"2025-12-13T07:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.557728 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.557790 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.557805 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.557825 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.557840 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:52Z","lastTransitionTime":"2025-12-13T07:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.660656 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.660707 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.660736 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.660757 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.660774 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:52Z","lastTransitionTime":"2025-12-13T07:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.762747 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.762820 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.762834 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.762850 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.762860 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:52Z","lastTransitionTime":"2025-12-13T07:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.792570 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd"] Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.792946 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.794289 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.794589 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.815582 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.826961 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.837873 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.848117 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.854800 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.854864 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssn42\" (UniqueName: \"kubernetes.io/projected/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-kube-api-access-ssn42\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.854926 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.855045 4733 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.864129 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\
\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":
\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.865276 4733 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.865306 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.865314 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.865357 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.865368 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:52Z","lastTransitionTime":"2025-12-13T07:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.876551 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.888375 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.899722 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.911493 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.921551 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.933747 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.942548 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.954245 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.955636 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc 
kubenswrapper[4733]: I1213 07:04:52.955732 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.955759 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.955786 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssn42\" (UniqueName: \"kubernetes.io/projected/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-kube-api-access-ssn42\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.956493 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.956750 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.960783 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.966467 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.967996 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.968029 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.968039 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.968055 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.968067 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:52Z","lastTransitionTime":"2025-12-13T07:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.971891 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssn42\" (UniqueName: \"kubernetes.io/projected/6562fcf1-dd70-4d8c-954c-1e5775cdd5c5-kube-api-access-ssn42\") pod \"ovnkube-control-plane-749d76644c-vwcvd\" (UID: \"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.980784 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-mult
us-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:52 crc kubenswrapper[4733]: I1213 07:04:52.993902 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\
"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o:
//75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountP
ath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:52Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.021990 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/0.log" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.024675 4733 generic.go:334] "Generic (PLEG): container finished" podID="b6fa9235-730d-46d0-9211-563c96e073fb" containerID="9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc" exitCode=1 Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.024722 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerDied","Data":"9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc"} Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.025403 4733 scope.go:117] "RemoveContainer" containerID="9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.041029 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.053368 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.070415 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.070452 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.070461 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.070474 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.070483 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:53Z","lastTransitionTime":"2025-12-13T07:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.076042 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI1213 07:04:52.123092 6040 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1213 07:04:52.123116 6040 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1213 07:04:52.123129 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1213 07:04:52.123135 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1213 07:04:52.123140 6040 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1213 07:04:52.123154 6040 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1213 07:04:52.123162 6040 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1213 07:04:52.123163 6040 handler.go:208] Removed *v1.Node event handler 2\\\\nI1213 07:04:52.123174 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1213 07:04:52.123176 6040 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1213 07:04:52.123221 6040 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1213 07:04:52.123258 6040 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1213 07:04:52.123268 6040 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1213 07:04:52.123274 6040 factory.go:656] Stopping watch factory\\\\nI1213 07:04:52.123287 6040 ovnkube.go:599] Stopped ovnkube\\\\nI1213 
07:04:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.085414 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"p
hase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.096755 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostI
P\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.106228 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.111352 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\
\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: W1213 07:04:53.118471 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6562fcf1_dd70_4d8c_954c_1e5775cdd5c5.slice/crio-a8da906d9c0fdd2367163907f049f49d71f714c2fa1bf14f886ad81789264e01 WatchSource:0}: Error finding container a8da906d9c0fdd2367163907f049f49d71f714c2fa1bf14f886ad81789264e01: Status 404 returned error can't find the container with id a8da906d9c0fdd2367163907f049f49d71f714c2fa1bf14f886ad81789264e01 Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.123768 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.137196 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.152965 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.167361 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.172005 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.172064 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.172074 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.172088 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.172117 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:53Z","lastTransitionTime":"2025-12-13T07:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.180536 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.190784 4733 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.202061 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.212722 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.230093 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.243332 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.274732 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.274766 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.274776 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.274791 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.274800 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:53Z","lastTransitionTime":"2025-12-13T07:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.377223 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.377265 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.377276 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.377290 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.377298 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:53Z","lastTransitionTime":"2025-12-13T07:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.479171 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.479230 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.479239 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.479254 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.479264 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:53Z","lastTransitionTime":"2025-12-13T07:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.580834 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.580903 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.580914 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.580928 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.580937 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:53Z","lastTransitionTime":"2025-12-13T07:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.594230 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.594269 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.594282 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:53 crc kubenswrapper[4733]: E1213 07:04:53.594374 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:53 crc kubenswrapper[4733]: E1213 07:04:53.594480 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:53 crc kubenswrapper[4733]: E1213 07:04:53.594561 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.683401 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.683435 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.683446 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.683462 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.683475 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:53Z","lastTransitionTime":"2025-12-13T07:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.785373 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.785407 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.785419 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.785433 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.785443 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:53Z","lastTransitionTime":"2025-12-13T07:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.887640 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.887681 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.887723 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.887737 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.887746 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:53Z","lastTransitionTime":"2025-12-13T07:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.990565 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.990607 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.990617 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.990634 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:53 crc kubenswrapper[4733]: I1213 07:04:53.990645 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:53Z","lastTransitionTime":"2025-12-13T07:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.030294 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" event={"ID":"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5","Type":"ContainerStarted","Data":"172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.030369 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" event={"ID":"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5","Type":"ContainerStarted","Data":"c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.030381 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" event={"ID":"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5","Type":"ContainerStarted","Data":"a8da906d9c0fdd2367163907f049f49d71f714c2fa1bf14f886ad81789264e01"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.032161 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/1.log" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.033022 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/0.log" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.037063 4733 generic.go:334] "Generic (PLEG): container finished" podID="b6fa9235-730d-46d0-9211-563c96e073fb" containerID="3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4" exitCode=1 Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.037120 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerDied","Data":"3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.037203 4733 scope.go:117] "RemoveContainer" containerID="9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.038442 4733 scope.go:117] "RemoveContainer" containerID="3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4" Dec 13 07:04:54 crc kubenswrapper[4733]: E1213 07:04:54.038711 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" podUID="b6fa9235-730d-46d0-9211-563c96e073fb" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.056837 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.071483 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.091922 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee
07b8738b65f0388bf8aafecc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI1213 07:04:52.123092 6040 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1213 07:04:52.123116 6040 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1213 07:04:52.123129 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1213 07:04:52.123135 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1213 07:04:52.123140 6040 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1213 07:04:52.123154 6040 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1213 07:04:52.123162 6040 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1213 07:04:52.123163 6040 handler.go:208] Removed *v1.Node event handler 2\\\\nI1213 07:04:52.123174 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1213 07:04:52.123176 6040 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1213 07:04:52.123221 6040 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1213 07:04:52.123258 6040 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1213 07:04:52.123268 6040 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1213 07:04:52.123274 6040 factory.go:656] Stopping watch factory\\\\nI1213 07:04:52.123287 6040 ovnkube.go:599] Stopped ovnkube\\\\nI1213 
07:04:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.093955 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.094002 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.094018 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.094048 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.094062 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.108020 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.121628 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.136712 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.151699 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.163973 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.179485 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.195910 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.197641 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.197683 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.197695 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.197710 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.197719 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.210260 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.226854 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.243283 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.260430 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.279237 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.290676 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.299929 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.299969 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.299979 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.299993 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 
07:04:54.300003 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.312834 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f4
2928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.327908 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 
07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.335275 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.335320 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.335329 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.335344 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.335354 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.343740 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: E1213 07:04:54.346081 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.351067 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.351111 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.351124 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.351141 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.351156 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.358439 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: E1213 07:04:54.370609 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6
c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.374827 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.374858 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.374867 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.374880 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.374890 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.380163 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab2007
2f847b52568a8b97f63ae0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI1213 07:04:52.123092 6040 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1213 07:04:52.123116 6040 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1213 07:04:52.123129 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1213 07:04:52.123135 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1213 07:04:52.123140 6040 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1213 07:04:52.123154 6040 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1213 07:04:52.123162 6040 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1213 07:04:52.123163 6040 handler.go:208] Removed *v1.Node event handler 2\\\\nI1213 07:04:52.123174 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1213 07:04:52.123176 6040 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1213 07:04:52.123221 6040 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1213 07:04:52.123258 6040 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1213 07:04:52.123268 6040 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1213 07:04:52.123274 6040 factory.go:656] Stopping watch factory\\\\nI1213 07:04:52.123287 6040 ovnkube.go:599] Stopped ovnkube\\\\nI1213 07:04:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:53Z\\\",\\\"message\\\":\\\"art default network controller: unable to create admin network policy controller, err: could not add Event Handler for banpInformer during admin network policy controller initialization, handler {0x1fcc300 0x1fcbfe0 0x1fcbf80} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z]\\\\nI1213 07:04:53.875032 6180 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: E1213 07:04:54.387973 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.392226 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.392256 4733 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.392266 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.392290 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.392303 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.400883 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/s
tatic-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: E1213 07:04:54.414796 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6
c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.419382 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.419435 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.419450 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.419471 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.419484 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.425002 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: E1213 07:04:54.434923 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: E1213 07:04:54.435047 4733 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.437730 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.437768 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.437777 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.437794 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.437804 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.438475 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.451041 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.461560 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.475309 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.488067 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.505996 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.522344 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.536361 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.540075 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.540124 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.540137 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.540153 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.540166 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.554116 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:
04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:54Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.642732 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.642784 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.642793 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.642808 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.642817 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.745014 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.745048 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.745058 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.745071 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.745079 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.847253 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.847300 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.847329 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.847343 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.847353 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.949821 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.949868 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.949911 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.949930 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:54 crc kubenswrapper[4733]: I1213 07:04:54.949940 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:54Z","lastTransitionTime":"2025-12-13T07:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.002880 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-r5dfg"] Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.003403 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:04:55 crc kubenswrapper[4733]: E1213 07:04:55.003489 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.018342 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\
"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.031712 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.042020 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/1.log" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.050300 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.052118 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.052165 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.052177 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.052196 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.052242 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:55Z","lastTransitionTime":"2025-12-13T07:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.065586 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.075855 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.075894 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdqx9\" (UniqueName: \"kubernetes.io/projected/868ca71b-6aae-4054-8b03-cf7c2b3499cd-kube-api-access-xdqx9\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.077021 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.089619 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.102550 4733 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.115759 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.129501 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.147344 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.154775 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.154817 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.154826 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.154869 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.154881 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:55Z","lastTransitionTime":"2025-12-13T07:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.160066 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.176774 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.176824 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdqx9\" (UniqueName: \"kubernetes.io/projected/868ca71b-6aae-4054-8b03-cf7c2b3499cd-kube-api-access-xdqx9\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:04:55 crc kubenswrapper[4733]: E1213 07:04:55.177000 4733 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:04:55 crc kubenswrapper[4733]: E1213 07:04:55.177073 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs podName:868ca71b-6aae-4054-8b03-cf7c2b3499cd nodeName:}" failed. No retries permitted until 2025-12-13 07:04:55.677055357 +0000 UTC m=+40.288285840 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs") pod "network-metrics-daemon-r5dfg" (UID: "868ca71b-6aae-4054-8b03-cf7c2b3499cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.179720 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f5840
8f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.192235 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 
07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.194200 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdqx9\" (UniqueName: \"kubernetes.io/projected/868ca71b-6aae-4054-8b03-cf7c2b3499cd-kube-api-access-xdqx9\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.205544 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster
-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.218373 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.235706 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab2007
2f847b52568a8b97f63ae0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI1213 07:04:52.123092 6040 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1213 07:04:52.123116 6040 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1213 07:04:52.123129 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1213 07:04:52.123135 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1213 07:04:52.123140 6040 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1213 07:04:52.123154 6040 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1213 07:04:52.123162 6040 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1213 07:04:52.123163 6040 handler.go:208] Removed *v1.Node event handler 2\\\\nI1213 07:04:52.123174 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1213 07:04:52.123176 6040 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1213 07:04:52.123221 6040 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1213 07:04:52.123258 6040 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1213 07:04:52.123268 6040 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1213 07:04:52.123274 6040 factory.go:656] Stopping watch factory\\\\nI1213 07:04:52.123287 6040 ovnkube.go:599] Stopped ovnkube\\\\nI1213 07:04:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:53Z\\\",\\\"message\\\":\\\"art default network controller: unable to create admin network policy controller, err: could not add Event Handler for banpInformer during admin network policy controller initialization, handler {0x1fcc300 0x1fcbfe0 0x1fcbf80} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z]\\\\nI1213 07:04:53.875032 6180 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.246437 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.257388 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.257426 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.257434 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.257449 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.257458 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:55Z","lastTransitionTime":"2025-12-13T07:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.359535 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.359600 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.359609 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.359624 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.359632 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:55Z","lastTransitionTime":"2025-12-13T07:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.462776 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.462825 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.462849 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.462868 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.462880 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:55Z","lastTransitionTime":"2025-12-13T07:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.533045 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.547034 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.560290 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.565397 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.565466 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.565477 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.565492 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.565503 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:55Z","lastTransitionTime":"2025-12-13T07:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.573715 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.584884 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.594456 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.594472 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.594482 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:55 crc kubenswrapper[4733]: E1213 07:04:55.594567 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:55 crc kubenswrapper[4733]: E1213 07:04:55.594637 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:55 crc kubenswrapper[4733]: E1213 07:04:55.594746 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.597770 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.609732 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.620408 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.666966 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.666996 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.667006 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.667020 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.667029 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:55Z","lastTransitionTime":"2025-12-13T07:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.670406 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.680127 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:04:55 crc kubenswrapper[4733]: E1213 07:04:55.680338 4733 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:04:55 crc kubenswrapper[4733]: E1213 07:04:55.680446 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs podName:868ca71b-6aae-4054-8b03-cf7c2b3499cd nodeName:}" failed. No retries permitted until 2025-12-13 07:04:56.680408536 +0000 UTC m=+41.291639019 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs") pod "network-metrics-daemon-r5dfg" (UID: "868ca71b-6aae-4054-8b03-cf7c2b3499cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.689379 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.699041 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.711585 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.738272 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.752131 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.763170 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.769324 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.769361 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.769380 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.769394 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.769403 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:55Z","lastTransitionTime":"2025-12-13T07:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.776372 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.786758 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.802924 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab2007
2f847b52568a8b97f63ae0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI1213 07:04:52.123092 6040 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1213 07:04:52.123116 6040 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1213 07:04:52.123129 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1213 07:04:52.123135 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1213 07:04:52.123140 6040 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1213 07:04:52.123154 6040 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1213 07:04:52.123162 6040 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1213 07:04:52.123163 6040 handler.go:208] Removed *v1.Node event handler 2\\\\nI1213 07:04:52.123174 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1213 07:04:52.123176 6040 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1213 07:04:52.123221 6040 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1213 07:04:52.123258 6040 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1213 07:04:52.123268 6040 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1213 07:04:52.123274 6040 factory.go:656] Stopping watch factory\\\\nI1213 07:04:52.123287 6040 ovnkube.go:599] Stopped ovnkube\\\\nI1213 07:04:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:53Z\\\",\\\"message\\\":\\\"art default network controller: unable to create admin network policy controller, err: could not add Event Handler for banpInformer during admin network policy controller initialization, handler {0x1fcc300 0x1fcbfe0 0x1fcbf80} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z]\\\\nI1213 07:04:53.875032 6180 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.823321 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.833604 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.849726 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab2007
2f847b52568a8b97f63ae0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI1213 07:04:52.123092 6040 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1213 07:04:52.123116 6040 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1213 07:04:52.123129 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1213 07:04:52.123135 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1213 07:04:52.123140 6040 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1213 07:04:52.123154 6040 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1213 07:04:52.123162 6040 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1213 07:04:52.123163 6040 handler.go:208] Removed *v1.Node event handler 2\\\\nI1213 07:04:52.123174 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1213 07:04:52.123176 6040 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1213 07:04:52.123221 6040 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1213 07:04:52.123258 6040 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1213 07:04:52.123268 6040 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1213 07:04:52.123274 6040 factory.go:656] Stopping watch factory\\\\nI1213 07:04:52.123287 6040 ovnkube.go:599] Stopped ovnkube\\\\nI1213 07:04:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:53Z\\\",\\\"message\\\":\\\"art default network controller: unable to create admin network policy controller, err: could not add Event Handler for banpInformer during admin network policy controller initialization, handler {0x1fcc300 0x1fcbfe0 0x1fcbf80} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z]\\\\nI1213 07:04:53.875032 6180 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.860173 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.871879 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.871913 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.871923 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.871935 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.871946 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:55Z","lastTransitionTime":"2025-12-13T07:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.875192 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.889912 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.902499 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.916054 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.925546 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.935873 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.947863 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.959901 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.970883 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.974433 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.974461 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.974472 4733 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.974487 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.974495 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:55Z","lastTransitionTime":"2025-12-13T07:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.983151 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:55 crc kubenswrapper[4733]: I1213 07:04:55.996443 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:55Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.010545 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:56Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.021471 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"
podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:56Z is after 2025-08-24T17:21:41Z" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.076357 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.076398 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.076409 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.076426 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.076436 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:56Z","lastTransitionTime":"2025-12-13T07:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.178761 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.178803 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.178817 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.178836 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.178848 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:56Z","lastTransitionTime":"2025-12-13T07:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.280986 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.281031 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.281040 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.281054 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.281063 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:56Z","lastTransitionTime":"2025-12-13T07:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.383534 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.383573 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.383586 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.383602 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.383620 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:56Z","lastTransitionTime":"2025-12-13T07:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.486516 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.486556 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.486565 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.486578 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.486589 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:56Z","lastTransitionTime":"2025-12-13T07:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.588336 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.588373 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.588383 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.588396 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.588405 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:56Z","lastTransitionTime":"2025-12-13T07:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.593989 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:04:56 crc kubenswrapper[4733]: E1213 07:04:56.594282 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.690616 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.690681 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.690699 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.690723 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.690739 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:56Z","lastTransitionTime":"2025-12-13T07:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.691772 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:04:56 crc kubenswrapper[4733]: E1213 07:04:56.691906 4733 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:04:56 crc kubenswrapper[4733]: E1213 07:04:56.691976 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs podName:868ca71b-6aae-4054-8b03-cf7c2b3499cd nodeName:}" failed. No retries permitted until 2025-12-13 07:04:58.69195953 +0000 UTC m=+43.303190033 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs") pod "network-metrics-daemon-r5dfg" (UID: "868ca71b-6aae-4054-8b03-cf7c2b3499cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.792598 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.792643 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.792659 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.792676 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.792689 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:56Z","lastTransitionTime":"2025-12-13T07:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.895182 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.895252 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.895265 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.895287 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.895300 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:56Z","lastTransitionTime":"2025-12-13T07:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.997622 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.997681 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.997697 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.997719 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:56 crc kubenswrapper[4733]: I1213 07:04:56.997734 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:56Z","lastTransitionTime":"2025-12-13T07:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.099952 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.099992 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.100002 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.100019 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.100030 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:57Z","lastTransitionTime":"2025-12-13T07:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.202763 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.202815 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.202853 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.202884 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.202898 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:57Z","lastTransitionTime":"2025-12-13T07:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.304745 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.304780 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.304788 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.304802 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.304811 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:57Z","lastTransitionTime":"2025-12-13T07:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.406350 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.406400 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.406413 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.406431 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.406443 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:57Z","lastTransitionTime":"2025-12-13T07:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.509287 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.509336 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.509346 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.509366 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.509377 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:57Z","lastTransitionTime":"2025-12-13T07:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.594380 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.594380 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:57 crc kubenswrapper[4733]: E1213 07:04:57.594524 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:57 crc kubenswrapper[4733]: E1213 07:04:57.594579 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.594398 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:57 crc kubenswrapper[4733]: E1213 07:04:57.594675 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.611810 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.611869 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.611882 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.611897 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.611907 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:57Z","lastTransitionTime":"2025-12-13T07:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.714496 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.714540 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.714548 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.714563 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.714572 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:57Z","lastTransitionTime":"2025-12-13T07:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.816864 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.816896 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.816907 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.816922 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.816932 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:57Z","lastTransitionTime":"2025-12-13T07:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.919311 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.919353 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.919366 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.919380 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:57 crc kubenswrapper[4733]: I1213 07:04:57.919392 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:57Z","lastTransitionTime":"2025-12-13T07:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.022367 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.022447 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.022471 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.022492 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.022504 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:58Z","lastTransitionTime":"2025-12-13T07:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.124422 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.124497 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.124519 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.124547 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.124569 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:58Z","lastTransitionTime":"2025-12-13T07:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.226702 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.226770 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.226787 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.226803 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.226822 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:58Z","lastTransitionTime":"2025-12-13T07:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.329540 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.329601 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.329610 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.329623 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.329633 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:58Z","lastTransitionTime":"2025-12-13T07:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.432623 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.432698 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.432722 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.432746 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.432767 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:58Z","lastTransitionTime":"2025-12-13T07:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.535680 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.535745 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.535766 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.535793 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.535806 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:58Z","lastTransitionTime":"2025-12-13T07:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.594700 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:04:58 crc kubenswrapper[4733]: E1213 07:04:58.594875 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.638287 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.638340 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.638351 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.638369 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.638379 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:58Z","lastTransitionTime":"2025-12-13T07:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.712077 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:04:58 crc kubenswrapper[4733]: E1213 07:04:58.712304 4733 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:04:58 crc kubenswrapper[4733]: E1213 07:04:58.713022 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs podName:868ca71b-6aae-4054-8b03-cf7c2b3499cd nodeName:}" failed. No retries permitted until 2025-12-13 07:05:02.712998649 +0000 UTC m=+47.324229132 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs") pod "network-metrics-daemon-r5dfg" (UID: "868ca71b-6aae-4054-8b03-cf7c2b3499cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.740343 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.740374 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.740381 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.740394 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.740403 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:58Z","lastTransitionTime":"2025-12-13T07:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.843068 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.843101 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.843112 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.843127 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.843138 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:58Z","lastTransitionTime":"2025-12-13T07:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.946757 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.946823 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.946850 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.946874 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:58 crc kubenswrapper[4733]: I1213 07:04:58.946893 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:58Z","lastTransitionTime":"2025-12-13T07:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.049815 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.049881 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.049896 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.049915 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.049927 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:59Z","lastTransitionTime":"2025-12-13T07:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.152984 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.153023 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.153032 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.153047 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.153056 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:59Z","lastTransitionTime":"2025-12-13T07:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.255409 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.255459 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.255472 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.255489 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.255501 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:59Z","lastTransitionTime":"2025-12-13T07:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.357983 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.358037 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.358049 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.358067 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.358079 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:59Z","lastTransitionTime":"2025-12-13T07:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.460018 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.460052 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.460061 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.460076 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.460088 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:59Z","lastTransitionTime":"2025-12-13T07:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.564003 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.564036 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.564045 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.564061 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.564071 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:59Z","lastTransitionTime":"2025-12-13T07:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.593890 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.593889 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:04:59 crc kubenswrapper[4733]: E1213 07:04:59.594013 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:04:59 crc kubenswrapper[4733]: E1213 07:04:59.594068 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.593889 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:04:59 crc kubenswrapper[4733]: E1213 07:04:59.594125 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.666272 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.666313 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.666323 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.666341 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.666352 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:59Z","lastTransitionTime":"2025-12-13T07:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.768230 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.768458 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.768469 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.768485 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.768494 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:59Z","lastTransitionTime":"2025-12-13T07:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.870736 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.870998 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.871068 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.871136 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.871197 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:59Z","lastTransitionTime":"2025-12-13T07:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.974095 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.974145 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.974160 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.974184 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:04:59 crc kubenswrapper[4733]: I1213 07:04:59.974199 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:04:59Z","lastTransitionTime":"2025-12-13T07:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.076749 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.076792 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.076804 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.076823 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.076835 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:00Z","lastTransitionTime":"2025-12-13T07:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.178834 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.178869 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.178878 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.178893 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.178903 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:00Z","lastTransitionTime":"2025-12-13T07:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.281179 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.281244 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.281255 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.281273 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.281285 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:00Z","lastTransitionTime":"2025-12-13T07:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.383809 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.384063 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.384143 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.384248 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.384329 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:00Z","lastTransitionTime":"2025-12-13T07:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.486640 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.486887 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.486993 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.487089 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.487188 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:00Z","lastTransitionTime":"2025-12-13T07:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.589344 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.589636 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.589730 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.589827 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.589892 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:00Z","lastTransitionTime":"2025-12-13T07:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.593679 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:00 crc kubenswrapper[4733]: E1213 07:05:00.593802 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.691612 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.691844 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.691939 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.692027 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.692107 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:00Z","lastTransitionTime":"2025-12-13T07:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.794817 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.794866 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.794877 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.794894 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.794905 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:00Z","lastTransitionTime":"2025-12-13T07:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.897528 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.897560 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.897568 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.897584 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.897594 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:00Z","lastTransitionTime":"2025-12-13T07:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.999687 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.999732 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.999741 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:00 crc kubenswrapper[4733]: I1213 07:05:00.999756 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:00.999766 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:00Z","lastTransitionTime":"2025-12-13T07:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.102095 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.102125 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.102134 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.102147 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.102155 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:01Z","lastTransitionTime":"2025-12-13T07:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.204792 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.204856 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.204872 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.204894 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.204910 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:01Z","lastTransitionTime":"2025-12-13T07:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.307910 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.307971 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.307985 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.308002 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.308014 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:01Z","lastTransitionTime":"2025-12-13T07:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.410981 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.411016 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.411027 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.411042 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.411054 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:01Z","lastTransitionTime":"2025-12-13T07:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.513686 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.513728 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.513740 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.513755 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.513766 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:01Z","lastTransitionTime":"2025-12-13T07:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.593728 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.593770 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.593728 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:01 crc kubenswrapper[4733]: E1213 07:05:01.593894 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:01 crc kubenswrapper[4733]: E1213 07:05:01.594047 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:01 crc kubenswrapper[4733]: E1213 07:05:01.594168 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.616258 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.616298 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.616306 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.616326 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.616335 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:01Z","lastTransitionTime":"2025-12-13T07:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.718678 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.718717 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.718752 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.718773 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.718786 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:01Z","lastTransitionTime":"2025-12-13T07:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.820654 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.820696 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.820707 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.820719 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.820729 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:01Z","lastTransitionTime":"2025-12-13T07:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.922841 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.922892 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.922903 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.922915 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:01 crc kubenswrapper[4733]: I1213 07:05:01.922924 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:01Z","lastTransitionTime":"2025-12-13T07:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.025724 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.025793 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.025806 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.025826 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.025838 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:02Z","lastTransitionTime":"2025-12-13T07:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.128773 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.128814 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.128823 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.128838 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.128847 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:02Z","lastTransitionTime":"2025-12-13T07:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.231594 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.231648 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.231659 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.231680 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.231693 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:02Z","lastTransitionTime":"2025-12-13T07:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.334319 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.334359 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.334369 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.334383 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.334393 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:02Z","lastTransitionTime":"2025-12-13T07:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.438180 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.438294 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.438318 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.438348 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.438367 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:02Z","lastTransitionTime":"2025-12-13T07:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.542174 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.542291 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.542322 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.542540 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.542564 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:02Z","lastTransitionTime":"2025-12-13T07:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.594001 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:02 crc kubenswrapper[4733]: E1213 07:05:02.594166 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.646507 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.646579 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.646592 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.646613 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.646627 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:02Z","lastTransitionTime":"2025-12-13T07:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.748930 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.748965 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.748975 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.748990 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.749002 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:02Z","lastTransitionTime":"2025-12-13T07:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.752056 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:02 crc kubenswrapper[4733]: E1213 07:05:02.752166 4733 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:05:02 crc kubenswrapper[4733]: E1213 07:05:02.752235 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs podName:868ca71b-6aae-4054-8b03-cf7c2b3499cd nodeName:}" failed. No retries permitted until 2025-12-13 07:05:10.752203217 +0000 UTC m=+55.363433700 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs") pod "network-metrics-daemon-r5dfg" (UID: "868ca71b-6aae-4054-8b03-cf7c2b3499cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.850701 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.850736 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.850747 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.850765 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.850778 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:02Z","lastTransitionTime":"2025-12-13T07:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.954300 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.954334 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.954343 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.954355 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:02 crc kubenswrapper[4733]: I1213 07:05:02.954363 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:02Z","lastTransitionTime":"2025-12-13T07:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.056533 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.056822 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.056931 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.057021 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.057089 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:03Z","lastTransitionTime":"2025-12-13T07:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.159733 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.159759 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.159767 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.159779 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.159787 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:03Z","lastTransitionTime":"2025-12-13T07:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.262387 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.262847 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.262910 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.263038 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.263124 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:03Z","lastTransitionTime":"2025-12-13T07:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.365519 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.365555 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.365566 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.365579 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.365587 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:03Z","lastTransitionTime":"2025-12-13T07:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.467446 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.467482 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.467491 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.467506 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.467516 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:03Z","lastTransitionTime":"2025-12-13T07:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.569829 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.569872 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.569909 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.569927 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.569940 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:03Z","lastTransitionTime":"2025-12-13T07:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.594857 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.594880 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.594902 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:03 crc kubenswrapper[4733]: E1213 07:05:03.595129 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:03 crc kubenswrapper[4733]: E1213 07:05:03.595292 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:03 crc kubenswrapper[4733]: E1213 07:05:03.595499 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.672262 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.672312 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.672320 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.672333 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.672366 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:03Z","lastTransitionTime":"2025-12-13T07:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.774776 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.774839 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.774851 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.774869 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.774945 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:03Z","lastTransitionTime":"2025-12-13T07:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.877900 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.877959 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.877971 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.878003 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.878014 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:03Z","lastTransitionTime":"2025-12-13T07:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.980785 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.980837 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.980848 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.980864 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:03 crc kubenswrapper[4733]: I1213 07:05:03.980874 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:03Z","lastTransitionTime":"2025-12-13T07:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.084595 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.084646 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.084657 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.084681 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.084732 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.188030 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.188130 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.188148 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.188168 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.188179 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.290741 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.290798 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.290808 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.290828 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.290840 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.393157 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.393203 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.393231 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.393245 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.393256 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.496752 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.496820 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.496834 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.496855 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.496870 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.579165 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.579255 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.579271 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.579293 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.579306 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.593784 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:04 crc kubenswrapper[4733]: E1213 07:05:04.593920 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:04 crc kubenswrapper[4733]: E1213 07:05:04.594662 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:04Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.600030 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.600068 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.600077 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.600092 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.600104 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: E1213 07:05:04.623660 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:04Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.628142 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.628224 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.628237 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.628261 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.628277 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: E1213 07:05:04.642139 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:04Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.646528 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.646573 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.646587 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.646606 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.646621 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: E1213 07:05:04.659945 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:04Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.663806 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.663848 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.663860 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.663879 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.663892 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: E1213 07:05:04.676479 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:04Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:04 crc kubenswrapper[4733]: E1213 07:05:04.676628 4733 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.677956 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.678003 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.678014 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.678029 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.678038 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.780302 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.780345 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.780355 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.780368 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.780377 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.882424 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.882474 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.882488 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.882506 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.882517 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.984744 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.984775 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.984786 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.984799 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:04 crc kubenswrapper[4733]: I1213 07:05:04.984809 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:04Z","lastTransitionTime":"2025-12-13T07:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.086892 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.086928 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.086939 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.086954 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.086962 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:05Z","lastTransitionTime":"2025-12-13T07:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.189334 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.189664 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.189842 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.189968 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.190062 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:05Z","lastTransitionTime":"2025-12-13T07:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.293184 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.293239 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.293249 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.293265 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.293276 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:05Z","lastTransitionTime":"2025-12-13T07:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.395627 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.395671 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.395683 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.395711 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.395723 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:05Z","lastTransitionTime":"2025-12-13T07:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.497811 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.497855 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.497866 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.497882 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.497893 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:05Z","lastTransitionTime":"2025-12-13T07:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.594618 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.594682 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.594755 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.594789 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.594879 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.594975 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.599609 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.599649 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.599660 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.599678 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.599689 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:05Z","lastTransitionTime":"2025-12-13T07:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.607397 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.623566 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.648522 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab2007
2f847b52568a8b97f63ae0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c3318deb9db88d1cd50d8d77e887ee7472a2fee07b8738b65f0388bf8aafecc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"message\\\":\\\"sip/v1/apis/informers/externalversions/factory.go:140\\\\nI1213 07:04:52.123092 6040 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1213 07:04:52.123116 6040 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1213 07:04:52.123129 6040 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1213 07:04:52.123135 6040 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1213 07:04:52.123140 6040 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1213 07:04:52.123154 6040 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1213 07:04:52.123162 6040 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1213 07:04:52.123163 6040 handler.go:208] Removed *v1.Node event handler 2\\\\nI1213 07:04:52.123174 6040 handler.go:208] Removed *v1.Node event handler 7\\\\nI1213 07:04:52.123176 6040 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1213 07:04:52.123221 6040 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1213 07:04:52.123258 6040 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1213 07:04:52.123268 6040 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1213 07:04:52.123274 6040 factory.go:656] Stopping watch factory\\\\nI1213 07:04:52.123287 6040 ovnkube.go:599] Stopped ovnkube\\\\nI1213 07:04:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:53Z\\\",\\\"message\\\":\\\"art default network controller: unable to create admin network policy controller, err: could not add Event Handler for banpInformer during admin network policy controller initialization, handler {0x1fcc300 0x1fcbfe0 0x1fcbf80} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z]\\\\nI1213 07:04:53.875032 6180 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.664117 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.678914 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.685192 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.685377 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.685413 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:05:37.685387464 +0000 UTC m=+82.296617957 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.685460 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.685470 4733 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.685585 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:05:37.685576429 +0000 UTC m=+82.296806912 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.685495 4733 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.685666 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:05:37.685654352 +0000 UTC m=+82.296884835 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.691467 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTi
me\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.701814 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.701875 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.701885 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.701900 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.701909 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:05Z","lastTransitionTime":"2025-12-13T07:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.705100 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.720052 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.734424 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.749026 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.766952 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.783910 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.785914 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.786069 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.786180 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.786201 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.786227 4733 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.786275 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 07:05:37.786262102 +0000 UTC m=+82.397492585 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.786549 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.786570 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.786578 4733 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:05:05 crc kubenswrapper[4733]: E1213 07:05:05.787299 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 07:05:37.787255949 +0000 UTC m=+82.398486452 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.796339 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.803991 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.804029 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 
07:05:05.804077 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.804098 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.804111 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:05Z","lastTransitionTime":"2025-12-13T07:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.809594 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.822573 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.843017 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.856624 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:05Z is after 2025-08-24T17:21:41Z" Dec 13 
07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.906496 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.906572 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.906585 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.906601 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:05 crc kubenswrapper[4733]: I1213 07:05:05.906614 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:05Z","lastTransitionTime":"2025-12-13T07:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.009343 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.009384 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.009393 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.009407 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.009416 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:06Z","lastTransitionTime":"2025-12-13T07:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.111489 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.111521 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.111530 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.111544 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.111552 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:06Z","lastTransitionTime":"2025-12-13T07:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.214287 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.214337 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.214348 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.214364 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.214375 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:06Z","lastTransitionTime":"2025-12-13T07:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.316530 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.316581 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.316592 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.316610 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.316623 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:06Z","lastTransitionTime":"2025-12-13T07:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.419259 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.419311 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.419319 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.419332 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.419377 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:06Z","lastTransitionTime":"2025-12-13T07:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.521876 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.521907 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.521915 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.521928 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.521940 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:06Z","lastTransitionTime":"2025-12-13T07:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.594049 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:06 crc kubenswrapper[4733]: E1213 07:05:06.594199 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.594800 4733 scope.go:117] "RemoveContainer" containerID="3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.622116 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.624905 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.624938 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.624950 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.624966 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.624977 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:06Z","lastTransitionTime":"2025-12-13T07:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.633943 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.649664 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.664857 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.683840 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab2007
2f847b52568a8b97f63ae0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:53Z\\\",\\\"message\\\":\\\"art default network controller: unable to create admin network policy controller, err: could not add Event Handler for banpInformer during admin network policy controller initialization, handler {0x1fcc300 0x1fcbfe0 0x1fcbf80} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z]\\\\nI1213 07:04:53.875032 6180 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.697331 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.709561 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.721424 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.726568 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.726598 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.726607 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.726621 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.726630 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:06Z","lastTransitionTime":"2025-12-13T07:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.732092 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.743789 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.754537 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.764483 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.777444 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.789483 
4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.801087 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.815501 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.825626 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:06Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.827960 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.828015 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.828027 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.828042 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 
07:05:06.828054 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:06Z","lastTransitionTime":"2025-12-13T07:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.930731 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.930778 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.930790 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.930807 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:06 crc kubenswrapper[4733]: I1213 07:05:06.930818 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:06Z","lastTransitionTime":"2025-12-13T07:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.032841 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.032873 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.032880 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.032893 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.032902 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:07Z","lastTransitionTime":"2025-12-13T07:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.083314 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/1.log" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.085884 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e"} Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.086335 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.101061 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.123446 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.135455 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.135507 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.135517 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.135535 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.135547 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:07Z","lastTransitionTime":"2025-12-13T07:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.137432 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.147167 4733 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.159025 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.178095 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.190031 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.202496 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.218379 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.230145 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.238529 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.238591 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.238607 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.238629 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.238648 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:07Z","lastTransitionTime":"2025-12-13T07:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.248355 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:53Z\\\",\\\"message\\\":\\\"art default network controller: unable to create admin network policy controller, err: could not add Event Handler for banpInformer during admin network policy controller initialization, handler {0x1fcc300 0x1fcbfe0 0x1fcbf80} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z]\\\\nI1213 07:04:53.875032 6180 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.261702 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.272346 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.282891 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.296090 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.311025 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.325404 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:07Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.341186 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.341249 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.341258 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.341271 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.341280 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:07Z","lastTransitionTime":"2025-12-13T07:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.443575 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.443633 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.443647 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.443666 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.443682 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:07Z","lastTransitionTime":"2025-12-13T07:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.546334 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.546390 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.546408 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.546427 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.546437 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:07Z","lastTransitionTime":"2025-12-13T07:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.593714 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.593768 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:07 crc kubenswrapper[4733]: E1213 07:05:07.593863 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.593889 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:07 crc kubenswrapper[4733]: E1213 07:05:07.593969 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:07 crc kubenswrapper[4733]: E1213 07:05:07.594168 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.648767 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.649062 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.649158 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.649256 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.649332 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:07Z","lastTransitionTime":"2025-12-13T07:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.751900 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.752167 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.752265 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.752358 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.752431 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:07Z","lastTransitionTime":"2025-12-13T07:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.855274 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.855314 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.855328 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.855346 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.855357 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:07Z","lastTransitionTime":"2025-12-13T07:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.956912 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.956972 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.957005 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.957022 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:07 crc kubenswrapper[4733]: I1213 07:05:07.957035 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:07Z","lastTransitionTime":"2025-12-13T07:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.045328 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.055716 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.059083 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.059122 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.059135 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.059152 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.059164 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:08Z","lastTransitionTime":"2025-12-13T07:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.060258 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.073811 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.091121 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/2.log" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.092124 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/1.log" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.092160 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.095321 4733 generic.go:334] "Generic (PLEG): container finished" podID="b6fa9235-730d-46d0-9211-563c96e073fb" containerID="fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e" exitCode=1 Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.095359 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerDied","Data":"fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e"} Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.095429 4733 scope.go:117] "RemoveContainer" containerID="3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.096153 4733 scope.go:117] "RemoveContainer" containerID="fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e" Dec 13 07:05:08 crc kubenswrapper[4733]: E1213 07:05:08.096327 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" podUID="b6fa9235-730d-46d0-9211-563c96e073fb" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.107502 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.117292 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.128200 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 
07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.146076 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.158231 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.162121 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.162156 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.162166 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.162182 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.162192 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:08Z","lastTransitionTime":"2025-12-13T07:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.176826 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:53Z\\\",\\\"message\\\":\\\"art default network controller: unable to create admin network policy controller, err: could not add Event Handler for banpInformer during admin network policy controller initialization, handler {0x1fcc300 0x1fcbfe0 0x1fcbf80} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z]\\\\nI1213 07:04:53.875032 6180 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.187356 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.201175 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.214605 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.228393 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.241357 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.250879 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.262353 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.264303 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.264346 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.264358 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.264378 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.264392 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:08Z","lastTransitionTime":"2025-12-13T07:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.275024 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.289008 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.305373 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.316117 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.328125 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4bfb53a0-3ab3-4782-a022-be1618e20511\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5e6fb86d3842b9b963b5e458dab0eaf384c9d125d476a522b7510eff6916446\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://322f172bd0b9c19683a7af586a7f33610a7e6126d82c73056af895601513ca0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a932f4e3e6fe5266401c35693e55baa63300c701741ea3d2848a33c9a7f762b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.340236 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.351451 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.367331 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.367398 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.367413 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.367428 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.367437 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:08Z","lastTransitionTime":"2025-12-13T07:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.371186 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.382898 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 
07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.396120 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.409765 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.432158 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b33
4ab9b04eadb3cae9b6b4b24e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d09241979f124ebe13bde4c585d707091ab20072f847b52568a8b97f63ae0f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:04:53Z\\\",\\\"message\\\":\\\"art default network controller: unable to create admin network policy controller, err: could not add Event Handler for banpInformer during admin network policy controller initialization, handler {0x1fcc300 0x1fcbfe0 0x1fcbf80} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:04:53Z is after 2025-08-24T17:21:41Z]\\\\nI1213 07:04:53.875032 6180 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"6ea1fd71-2b40-4361-92ee-3f1ab4ec7414\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:07Z\\\",\\\"message\\\":\\\"ices do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"3f1b9878-e751-4e46-a226-ce007d2c4aa7\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1213 07:05:07.317019 6385 model_cli\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":
\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.444491 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.456855 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.471237 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.471286 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.471298 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.471317 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.471328 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:08Z","lastTransitionTime":"2025-12-13T07:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.472128 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.486560 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.527840 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.545804 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.561010 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:08Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.573999 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.574800 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.574839 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.574856 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.574868 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:08Z","lastTransitionTime":"2025-12-13T07:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.594353 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:08 crc kubenswrapper[4733]: E1213 07:05:08.594483 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.677397 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.677456 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.677467 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.677484 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.677495 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:08Z","lastTransitionTime":"2025-12-13T07:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.780129 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.780238 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.780253 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.780271 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.780282 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:08Z","lastTransitionTime":"2025-12-13T07:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.882185 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.882261 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.882273 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.882290 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.882301 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:08Z","lastTransitionTime":"2025-12-13T07:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.984700 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.984738 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.984750 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.984767 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:08 crc kubenswrapper[4733]: I1213 07:05:08.984780 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:08Z","lastTransitionTime":"2025-12-13T07:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.087899 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.087960 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.087974 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.087996 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.088039 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:09Z","lastTransitionTime":"2025-12-13T07:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.100272 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/2.log" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.103549 4733 scope.go:117] "RemoveContainer" containerID="fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e" Dec 13 07:05:09 crc kubenswrapper[4733]: E1213 07:05:09.103702 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" podUID="b6fa9235-730d-46d0-9211-563c96e073fb" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.123575 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
tc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16
Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.136442 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 
07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.150367 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.165034 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.185405 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b33
4ab9b04eadb3cae9b6b4b24e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:07Z\\\",\\\"message\\\":\\\"ices do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"3f1b9878-e751-4e46-a226-ce007d2c4aa7\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1213 07:05:07.317019 6385 model_cli\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:05:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.190370 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.190419 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.190431 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.190445 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.190455 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:09Z","lastTransitionTime":"2025-12-13T07:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.198758 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.213348 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.230913 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.245699 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.261625 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.276738 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.291443 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\"
:\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.292962 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.293030 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.293051 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.293075 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.293089 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:09Z","lastTransitionTime":"2025-12-13T07:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.309647 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4bfb53a0-3ab3-4782-a022-be1618e20511\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5e6fb86d3842b9b963b5e458dab0eaf384c9d125d476a522b7510eff6916446\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://322f172bd0b9c19683a7af586a7f33610a7e6126d82c73056af895601513ca0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a932f4e3e6fe5266401c35693e55baa63300c701741ea3d2848a33c9a7f762b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.324155 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.338335 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.355309 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.375525 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.388323 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"
podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:09Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.395318 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.395384 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.395425 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.395447 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.395459 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:09Z","lastTransitionTime":"2025-12-13T07:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.498347 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.498383 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.498392 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.498412 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.498422 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:09Z","lastTransitionTime":"2025-12-13T07:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.594198 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.594318 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:09 crc kubenswrapper[4733]: E1213 07:05:09.594407 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:09 crc kubenswrapper[4733]: E1213 07:05:09.594514 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.594655 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:09 crc kubenswrapper[4733]: E1213 07:05:09.594726 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.600280 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.600314 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.600324 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.600339 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.600348 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:09Z","lastTransitionTime":"2025-12-13T07:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.702059 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.702385 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.702454 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.702550 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.702745 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:09Z","lastTransitionTime":"2025-12-13T07:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.805496 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.805537 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.805545 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.805559 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.805567 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:09Z","lastTransitionTime":"2025-12-13T07:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.907942 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.907978 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.907989 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.908003 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:09 crc kubenswrapper[4733]: I1213 07:05:09.908013 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:09Z","lastTransitionTime":"2025-12-13T07:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.011154 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.011222 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.011235 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.011251 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.011261 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:10Z","lastTransitionTime":"2025-12-13T07:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.113829 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.113866 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.113895 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.113912 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.113921 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:10Z","lastTransitionTime":"2025-12-13T07:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.217086 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.217414 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.217535 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.217644 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.217727 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:10Z","lastTransitionTime":"2025-12-13T07:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.320419 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.320474 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.320488 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.320506 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.320519 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:10Z","lastTransitionTime":"2025-12-13T07:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.423983 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.424071 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.424093 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.424122 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.424143 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:10Z","lastTransitionTime":"2025-12-13T07:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.526980 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.527013 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.527021 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.527036 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.527045 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:10Z","lastTransitionTime":"2025-12-13T07:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.594137 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:10 crc kubenswrapper[4733]: E1213 07:05:10.594308 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.629166 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.629275 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.629294 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.629318 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.629332 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:10Z","lastTransitionTime":"2025-12-13T07:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.732300 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.732383 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.732417 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.732450 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.732468 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:10Z","lastTransitionTime":"2025-12-13T07:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.834507 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.834555 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.834565 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.834581 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.834595 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:10Z","lastTransitionTime":"2025-12-13T07:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.836906 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:10 crc kubenswrapper[4733]: E1213 07:05:10.837019 4733 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:05:10 crc kubenswrapper[4733]: E1213 07:05:10.837083 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs podName:868ca71b-6aae-4054-8b03-cf7c2b3499cd nodeName:}" failed. No retries permitted until 2025-12-13 07:05:26.837065015 +0000 UTC m=+71.448295508 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs") pod "network-metrics-daemon-r5dfg" (UID: "868ca71b-6aae-4054-8b03-cf7c2b3499cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.937139 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.937204 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.937248 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.937273 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:10 crc kubenswrapper[4733]: I1213 07:05:10.937289 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:10Z","lastTransitionTime":"2025-12-13T07:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.040552 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.040593 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.040603 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.040622 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.040633 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:11Z","lastTransitionTime":"2025-12-13T07:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.144022 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.144074 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.144083 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.144099 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.144111 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:11Z","lastTransitionTime":"2025-12-13T07:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.247599 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.247671 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.247682 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.247703 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.247730 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:11Z","lastTransitionTime":"2025-12-13T07:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.350905 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.350947 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.350957 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.350973 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.350982 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:11Z","lastTransitionTime":"2025-12-13T07:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.453964 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.454038 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.454060 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.454090 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.454113 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:11Z","lastTransitionTime":"2025-12-13T07:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.557160 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.557235 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.557245 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.557260 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.557270 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:11Z","lastTransitionTime":"2025-12-13T07:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.594166 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.594197 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:11 crc kubenswrapper[4733]: E1213 07:05:11.594457 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.594257 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:11 crc kubenswrapper[4733]: E1213 07:05:11.594690 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:11 crc kubenswrapper[4733]: E1213 07:05:11.594847 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.660716 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.660784 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.660797 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.660827 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.660842 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:11Z","lastTransitionTime":"2025-12-13T07:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.764112 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.764175 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.764201 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.764249 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.764274 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:11Z","lastTransitionTime":"2025-12-13T07:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.867237 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.867285 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.867297 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.867322 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.867335 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:11Z","lastTransitionTime":"2025-12-13T07:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.969791 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.969834 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.969843 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.969855 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:11 crc kubenswrapper[4733]: I1213 07:05:11.969864 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:11Z","lastTransitionTime":"2025-12-13T07:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.073283 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.073321 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.073332 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.073349 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.073391 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:12Z","lastTransitionTime":"2025-12-13T07:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.175507 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.175548 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.175557 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.175571 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.175583 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:12Z","lastTransitionTime":"2025-12-13T07:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.278264 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.278320 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.278367 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.278386 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.278400 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:12Z","lastTransitionTime":"2025-12-13T07:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.381156 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.381227 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.381240 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.381262 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.381272 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:12Z","lastTransitionTime":"2025-12-13T07:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.483660 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.483702 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.483711 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.483726 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.483737 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:12Z","lastTransitionTime":"2025-12-13T07:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.586397 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.586468 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.586483 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.586508 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.586525 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:12Z","lastTransitionTime":"2025-12-13T07:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.594677 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:12 crc kubenswrapper[4733]: E1213 07:05:12.594857 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.689020 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.689086 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.689101 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.689126 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.689142 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:12Z","lastTransitionTime":"2025-12-13T07:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.791350 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.791394 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.791404 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.791420 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.791430 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:12Z","lastTransitionTime":"2025-12-13T07:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.894503 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.894574 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.894589 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.894614 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.894633 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:12Z","lastTransitionTime":"2025-12-13T07:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.998035 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.998122 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.998135 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.998159 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:12 crc kubenswrapper[4733]: I1213 07:05:12.998172 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:12Z","lastTransitionTime":"2025-12-13T07:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.100352 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.100390 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.100409 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.100427 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.100438 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:13Z","lastTransitionTime":"2025-12-13T07:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.202355 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.202405 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.202421 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.202441 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.202455 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:13Z","lastTransitionTime":"2025-12-13T07:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.304448 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.304490 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.304504 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.304522 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.304533 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:13Z","lastTransitionTime":"2025-12-13T07:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.407132 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.407696 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.407768 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.407856 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.407924 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:13Z","lastTransitionTime":"2025-12-13T07:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.510750 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.510791 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.510799 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.510815 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.510824 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:13Z","lastTransitionTime":"2025-12-13T07:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.594169 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.594280 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:13 crc kubenswrapper[4733]: E1213 07:05:13.594408 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:13 crc kubenswrapper[4733]: E1213 07:05:13.594298 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.594429 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:13 crc kubenswrapper[4733]: E1213 07:05:13.594503 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.612936 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.612976 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.612987 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.613000 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.613013 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:13Z","lastTransitionTime":"2025-12-13T07:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.715155 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.715189 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.715200 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.715230 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.715244 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:13Z","lastTransitionTime":"2025-12-13T07:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.817941 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.817981 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.817990 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.818004 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.818013 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:13Z","lastTransitionTime":"2025-12-13T07:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.919797 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.919838 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.919847 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.919866 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:13 crc kubenswrapper[4733]: I1213 07:05:13.919875 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:13Z","lastTransitionTime":"2025-12-13T07:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.022481 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.022507 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.022515 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.022528 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.022537 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.124914 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.124976 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.124988 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.125009 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.125021 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.227473 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.227518 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.227531 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.227548 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.227562 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.329435 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.329475 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.329488 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.329501 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.329510 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.432908 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.432959 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.432971 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.432990 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.433003 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.535621 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.535682 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.535697 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.535714 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.535725 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.594653 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:14 crc kubenswrapper[4733]: E1213 07:05:14.594805 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.638493 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.638530 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.638539 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.638554 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.638563 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.740751 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.740787 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.740796 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.740811 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.740821 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.843240 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.843285 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.843294 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.843308 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.843318 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.862598 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.862654 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.862666 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.862684 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.862695 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: E1213 07:05:14.879859 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:14Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.884098 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.884150 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.884160 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.884176 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.884185 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: E1213 07:05:14.895232 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:14Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.899285 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.899324 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.899335 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.899371 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.899383 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: E1213 07:05:14.912009 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:14Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.915831 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.915888 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.915900 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.915917 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.915980 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: E1213 07:05:14.928397 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:14Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.933336 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.933437 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.933451 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.933468 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.933481 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:14 crc kubenswrapper[4733]: E1213 07:05:14.947973 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:14Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:14 crc kubenswrapper[4733]: E1213 07:05:14.948140 4733 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.949950 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.949991 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.950001 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.950021 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:14 crc kubenswrapper[4733]: I1213 07:05:14.950033 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:14Z","lastTransitionTime":"2025-12-13T07:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.053018 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.053058 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.053067 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.053091 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.053101 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:15Z","lastTransitionTime":"2025-12-13T07:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.155253 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.155298 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.155311 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.155327 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.155339 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:15Z","lastTransitionTime":"2025-12-13T07:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.257953 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.257992 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.258002 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.258017 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.258027 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:15Z","lastTransitionTime":"2025-12-13T07:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.359936 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.360011 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.360023 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.360038 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.360047 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:15Z","lastTransitionTime":"2025-12-13T07:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.462547 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.462594 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.462605 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.462620 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.462630 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:15Z","lastTransitionTime":"2025-12-13T07:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.565430 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.565477 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.565490 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.565508 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.565521 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:15Z","lastTransitionTime":"2025-12-13T07:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.593741 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.593778 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:15 crc kubenswrapper[4733]: E1213 07:05:15.593869 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.593948 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:15 crc kubenswrapper[4733]: E1213 07:05:15.594032 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:15 crc kubenswrapper[4733]: E1213 07:05:15.594087 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.609121 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.628022 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c687744
1ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.640322 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.658190 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b33
4ab9b04eadb3cae9b6b4b24e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:07Z\\\",\\\"message\\\":\\\"ices do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"3f1b9878-e751-4e46-a226-ce007d2c4aa7\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1213 07:05:07.317019 6385 model_cli\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:05:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.668465 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.668526 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.668538 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.668569 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.668650 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:15Z","lastTransitionTime":"2025-12-13T07:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.676228 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.691857 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.705556 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.716464 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.730959 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.741657 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.752071 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.766623 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.770968 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.770999 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.771011 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.771026 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.771039 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:15Z","lastTransitionTime":"2025-12-13T07:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.779515 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4bfb53a0-3ab3-4782-a022-be1618e20511\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5e6fb86d3842b9b963b5e458dab0eaf384c9d125d476a522b7510eff6916446\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://322f172bd0b9c19683a7af586a7f33610a7e6126d82c73056af895601513ca0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a932f4e3e6fe5266401c35693e55baa63300c701741ea3d2848a33c9a7f762b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.792232 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.808055 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.821031 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.835769 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.846195 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"
podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:15Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.873257 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.873319 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.873334 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.873356 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.873373 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:15Z","lastTransitionTime":"2025-12-13T07:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.977710 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.977744 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.977757 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.977784 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:15 crc kubenswrapper[4733]: I1213 07:05:15.977799 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:15Z","lastTransitionTime":"2025-12-13T07:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.081149 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.081464 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.081608 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.081720 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.081845 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:16Z","lastTransitionTime":"2025-12-13T07:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.184311 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.184369 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.184381 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.184396 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.184408 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:16Z","lastTransitionTime":"2025-12-13T07:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.287242 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.287282 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.287290 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.287305 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.287314 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:16Z","lastTransitionTime":"2025-12-13T07:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.390115 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.390163 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.390171 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.390186 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.390195 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:16Z","lastTransitionTime":"2025-12-13T07:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.491925 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.491957 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.491966 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.491980 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.491990 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:16Z","lastTransitionTime":"2025-12-13T07:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.593984 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:16 crc kubenswrapper[4733]: E1213 07:05:16.594183 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.595324 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.595367 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.595377 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.595394 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.595404 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:16Z","lastTransitionTime":"2025-12-13T07:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.698335 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.698379 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.698391 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.698408 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.698420 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:16Z","lastTransitionTime":"2025-12-13T07:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.800612 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.800650 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.800660 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.800677 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.800690 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:16Z","lastTransitionTime":"2025-12-13T07:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.903853 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.903898 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.903908 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.903922 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:16 crc kubenswrapper[4733]: I1213 07:05:16.903932 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:16Z","lastTransitionTime":"2025-12-13T07:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.006902 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.006963 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.006972 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.007197 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.007237 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:17Z","lastTransitionTime":"2025-12-13T07:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.109920 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.109953 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.109962 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.109979 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.109988 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:17Z","lastTransitionTime":"2025-12-13T07:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.212241 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.212282 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.212296 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.212313 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.212326 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:17Z","lastTransitionTime":"2025-12-13T07:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.314139 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.314197 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.314248 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.314266 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.314282 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:17Z","lastTransitionTime":"2025-12-13T07:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.416636 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.416683 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.416694 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.416713 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.416724 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:17Z","lastTransitionTime":"2025-12-13T07:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.519141 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.519186 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.519197 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.519229 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.519243 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:17Z","lastTransitionTime":"2025-12-13T07:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.594140 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.594193 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:17 crc kubenswrapper[4733]: E1213 07:05:17.594315 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:17 crc kubenswrapper[4733]: E1213 07:05:17.594478 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.594847 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:17 crc kubenswrapper[4733]: E1213 07:05:17.594963 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.622410 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.622446 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.622455 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.622471 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.622480 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:17Z","lastTransitionTime":"2025-12-13T07:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.724882 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.724925 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.724938 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.724956 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.724968 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:17Z","lastTransitionTime":"2025-12-13T07:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.827132 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.827180 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.827190 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.827224 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.827237 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:17Z","lastTransitionTime":"2025-12-13T07:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.930068 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.930156 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.930181 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.930270 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:17 crc kubenswrapper[4733]: I1213 07:05:17.930299 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:17Z","lastTransitionTime":"2025-12-13T07:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.033187 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.033464 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.033575 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.033653 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.033727 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:18Z","lastTransitionTime":"2025-12-13T07:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.136188 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.136304 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.136324 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.136344 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.136358 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:18Z","lastTransitionTime":"2025-12-13T07:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.238950 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.238999 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.239011 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.239030 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.239040 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:18Z","lastTransitionTime":"2025-12-13T07:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.341474 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.341518 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.341530 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.341547 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.341560 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:18Z","lastTransitionTime":"2025-12-13T07:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.444147 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.444200 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.444230 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.444248 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.444260 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:18Z","lastTransitionTime":"2025-12-13T07:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.577083 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.577129 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.577144 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.577166 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.577184 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:18Z","lastTransitionTime":"2025-12-13T07:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.593746 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:18 crc kubenswrapper[4733]: E1213 07:05:18.594101 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.679445 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.679475 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.679483 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.679496 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.679504 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:18Z","lastTransitionTime":"2025-12-13T07:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.781873 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.781905 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.781915 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.781928 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.781938 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:18Z","lastTransitionTime":"2025-12-13T07:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.885320 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.885365 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.885374 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.885394 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.885404 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:18Z","lastTransitionTime":"2025-12-13T07:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.987346 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.987380 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.987391 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.987405 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:18 crc kubenswrapper[4733]: I1213 07:05:18.987414 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:18Z","lastTransitionTime":"2025-12-13T07:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.089373 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.089412 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.089422 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.089437 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.089447 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:19Z","lastTransitionTime":"2025-12-13T07:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.191950 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.191990 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.192011 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.192028 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.192041 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:19Z","lastTransitionTime":"2025-12-13T07:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.294017 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.294174 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.294188 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.294225 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.294236 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:19Z","lastTransitionTime":"2025-12-13T07:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.396269 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.396326 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.396337 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.396351 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.396360 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:19Z","lastTransitionTime":"2025-12-13T07:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.498730 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.498839 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.498852 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.498870 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.498884 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:19Z","lastTransitionTime":"2025-12-13T07:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.593758 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.593792 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.593779 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:19 crc kubenswrapper[4733]: E1213 07:05:19.593887 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:19 crc kubenswrapper[4733]: E1213 07:05:19.593964 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:19 crc kubenswrapper[4733]: E1213 07:05:19.594071 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.601172 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.601228 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.601242 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.601260 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.601273 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:19Z","lastTransitionTime":"2025-12-13T07:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.702947 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.702983 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.702992 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.703005 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.703015 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:19Z","lastTransitionTime":"2025-12-13T07:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.804924 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.804983 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.804994 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.805008 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.805017 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:19Z","lastTransitionTime":"2025-12-13T07:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.906908 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.906959 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.906978 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.907001 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:19 crc kubenswrapper[4733]: I1213 07:05:19.907012 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:19Z","lastTransitionTime":"2025-12-13T07:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.009143 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.009186 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.009195 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.009230 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.009240 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:20Z","lastTransitionTime":"2025-12-13T07:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.111713 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.111764 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.111777 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.111794 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.111805 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:20Z","lastTransitionTime":"2025-12-13T07:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.213934 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.213992 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.214007 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.214025 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.214037 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:20Z","lastTransitionTime":"2025-12-13T07:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.319281 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.319317 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.319326 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.319348 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.319358 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:20Z","lastTransitionTime":"2025-12-13T07:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.422915 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.422971 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.422983 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.423004 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.423016 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:20Z","lastTransitionTime":"2025-12-13T07:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.525231 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.525269 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.525281 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.525298 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.525309 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:20Z","lastTransitionTime":"2025-12-13T07:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.594276 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:20 crc kubenswrapper[4733]: E1213 07:05:20.594423 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.628174 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.628254 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.628266 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.628301 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.628315 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:20Z","lastTransitionTime":"2025-12-13T07:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.730648 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.730716 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.730726 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.730767 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.730776 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:20Z","lastTransitionTime":"2025-12-13T07:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.832763 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.832809 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.832821 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.832837 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.832848 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:20Z","lastTransitionTime":"2025-12-13T07:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.935784 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.935837 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.935848 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.935865 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:20 crc kubenswrapper[4733]: I1213 07:05:20.935877 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:20Z","lastTransitionTime":"2025-12-13T07:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.038180 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.038248 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.038260 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.038275 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.038287 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:21Z","lastTransitionTime":"2025-12-13T07:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.140560 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.140603 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.140612 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.140628 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.140638 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:21Z","lastTransitionTime":"2025-12-13T07:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.242923 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.242974 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.242988 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.243006 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.243023 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:21Z","lastTransitionTime":"2025-12-13T07:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.345251 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.345302 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.345315 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.345332 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.345343 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:21Z","lastTransitionTime":"2025-12-13T07:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.447935 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.447976 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.447990 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.448005 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.448014 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:21Z","lastTransitionTime":"2025-12-13T07:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.550576 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.550632 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.550642 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.550677 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.550686 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:21Z","lastTransitionTime":"2025-12-13T07:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.593955 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.594022 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.594034 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:21 crc kubenswrapper[4733]: E1213 07:05:21.594098 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:21 crc kubenswrapper[4733]: E1213 07:05:21.594196 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:21 crc kubenswrapper[4733]: E1213 07:05:21.594798 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.595048 4733 scope.go:117] "RemoveContainer" containerID="fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e" Dec 13 07:05:21 crc kubenswrapper[4733]: E1213 07:05:21.595324 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" podUID="b6fa9235-730d-46d0-9211-563c96e073fb" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.652364 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.652415 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.652427 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.652445 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.652458 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:21Z","lastTransitionTime":"2025-12-13T07:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.754809 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.754846 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.754855 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.754869 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.754881 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:21Z","lastTransitionTime":"2025-12-13T07:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.857291 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.857332 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.857343 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.857358 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.857367 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:21Z","lastTransitionTime":"2025-12-13T07:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.959672 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.959941 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.960018 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.960113 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:21 crc kubenswrapper[4733]: I1213 07:05:21.960182 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:21Z","lastTransitionTime":"2025-12-13T07:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.088881 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.089746 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.089757 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.089771 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.089779 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:22Z","lastTransitionTime":"2025-12-13T07:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.192362 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.192397 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.192407 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.192423 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.192437 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:22Z","lastTransitionTime":"2025-12-13T07:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.299995 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.300045 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.300059 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.300080 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.300094 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:22Z","lastTransitionTime":"2025-12-13T07:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.402751 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.402803 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.402821 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.402838 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.402847 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:22Z","lastTransitionTime":"2025-12-13T07:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.505122 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.505162 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.505171 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.505186 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.505195 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:22Z","lastTransitionTime":"2025-12-13T07:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.594255 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:22 crc kubenswrapper[4733]: E1213 07:05:22.594431 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.606933 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.606973 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.606985 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.607003 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.607014 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:22Z","lastTransitionTime":"2025-12-13T07:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.708694 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.708746 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.708762 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.708779 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.708789 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:22Z","lastTransitionTime":"2025-12-13T07:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.810722 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.810800 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.810810 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.810826 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.810836 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:22Z","lastTransitionTime":"2025-12-13T07:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.913608 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.913663 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.913672 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.913684 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:22 crc kubenswrapper[4733]: I1213 07:05:22.913694 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:22Z","lastTransitionTime":"2025-12-13T07:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.015618 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.015675 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.015692 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.015749 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.015768 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:23Z","lastTransitionTime":"2025-12-13T07:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.117909 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.117946 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.117956 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.117971 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.117980 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:23Z","lastTransitionTime":"2025-12-13T07:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.220128 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.220164 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.220172 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.220190 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.220226 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:23Z","lastTransitionTime":"2025-12-13T07:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.322730 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.322770 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.322780 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.322813 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.322826 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:23Z","lastTransitionTime":"2025-12-13T07:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.424795 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.424862 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.424874 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.424915 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.424946 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:23Z","lastTransitionTime":"2025-12-13T07:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.526779 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.526824 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.526838 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.526851 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.526860 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:23Z","lastTransitionTime":"2025-12-13T07:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.594345 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.594388 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.594357 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:23 crc kubenswrapper[4733]: E1213 07:05:23.594495 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:23 crc kubenswrapper[4733]: E1213 07:05:23.594589 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:23 crc kubenswrapper[4733]: E1213 07:05:23.594660 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.628696 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.628739 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.628752 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.628766 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.628777 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:23Z","lastTransitionTime":"2025-12-13T07:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.731435 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.732325 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.732357 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.732374 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.732384 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:23Z","lastTransitionTime":"2025-12-13T07:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.835984 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.836071 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.836087 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.836136 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.836154 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:23Z","lastTransitionTime":"2025-12-13T07:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.939235 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.939287 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.939296 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.939316 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:23 crc kubenswrapper[4733]: I1213 07:05:23.939326 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:23Z","lastTransitionTime":"2025-12-13T07:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.041625 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.041675 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.041687 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.041706 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.041719 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:24Z","lastTransitionTime":"2025-12-13T07:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.143953 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.143984 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.143994 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.144009 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.144019 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:24Z","lastTransitionTime":"2025-12-13T07:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.246640 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.246702 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.246712 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.246726 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.246736 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:24Z","lastTransitionTime":"2025-12-13T07:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.349529 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.349561 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.349613 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.349627 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.349636 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:24Z","lastTransitionTime":"2025-12-13T07:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.452805 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.452844 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.452852 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.452867 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.452877 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:24Z","lastTransitionTime":"2025-12-13T07:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.555949 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.555986 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.556000 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.556014 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.556023 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:24Z","lastTransitionTime":"2025-12-13T07:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.594352 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:24 crc kubenswrapper[4733]: E1213 07:05:24.599381 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.614271 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.658317 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.658357 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.658366 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.658382 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.658393 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:24Z","lastTransitionTime":"2025-12-13T07:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.761034 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.761083 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.761095 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.761113 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.761125 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:24Z","lastTransitionTime":"2025-12-13T07:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.862817 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.862868 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.862879 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.862898 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.862910 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:24Z","lastTransitionTime":"2025-12-13T07:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.964846 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.964891 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.964905 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.964923 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:24 crc kubenswrapper[4733]: I1213 07:05:24.964936 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:24Z","lastTransitionTime":"2025-12-13T07:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.067685 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.067730 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.067741 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.067758 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.067767 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.087130 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.087165 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.087176 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.087193 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.087205 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: E1213 07:05:25.099080 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.103599 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.103678 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.103697 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.103719 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.103731 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: E1213 07:05:25.115777 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.120231 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.120267 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.120277 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.120295 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.120304 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: E1213 07:05:25.132735 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.135832 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.135869 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.135880 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.135898 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.135910 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: E1213 07:05:25.148655 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.152310 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.152350 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.152361 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.152379 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.152391 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: E1213 07:05:25.164668 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: E1213 07:05:25.164791 4733 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.170766 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.170806 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.170817 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.170835 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.170847 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.273088 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.273117 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.273125 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.273142 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.273152 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.375110 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.375143 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.375151 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.375165 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.375176 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.478455 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.478484 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.478495 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.478511 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.478523 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.580601 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.580640 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.580652 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.580666 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.580677 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.593922 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.593919 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:25 crc kubenswrapper[4733]: E1213 07:05:25.594008 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.594082 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:25 crc kubenswrapper[4733]: E1213 07:05:25.594185 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:25 crc kubenswrapper[4733]: E1213 07:05:25.594310 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.604377 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.621979 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\
"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:07Z\\\",\\\"message\\\":\\\"ices do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"3f1b9878-e751-4e46-a226-ce007d2c4aa7\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1213 07:05:07.317019 6385 model_cli\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:05:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.630980 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.642190 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.652499 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.663773 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.679462 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.682268 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.682299 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.682310 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.682325 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.682336 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.689303 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.698714 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.711205 4733 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c
39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.722956 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4bfb53a0-3ab3-4782-a022-be1618e20511\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5e6fb86d3842b9b963b5e458dab0eaf384c9d125d476a522b7510eff6916446\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://322f172bd0b9c19683a7af586a7f33610a7e6126d82c73056af895601513ca0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a932f4e3e6fe5266401c35693e55baa63300c701741ea3d2848a33c9a7f762b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.734418 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.745800 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.759956 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.776009 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.784417 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.784611 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.784682 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.784753 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.784814 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.787849 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.798097 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdafad9b-003c-4e26-91ba-8593a8ec6252\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f82045ac8c1cab00b6d1480e0b62bf93bc7d879cf9ce653bae848f7aa326f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.809449 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip
\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.830205 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77
309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:25Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.886702 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.886766 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.886777 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.886790 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.886799 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.988739 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.988778 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.988787 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.988801 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:25 crc kubenswrapper[4733]: I1213 07:05:25.988818 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:25Z","lastTransitionTime":"2025-12-13T07:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.091278 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.091538 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.091614 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.091758 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.091823 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:26Z","lastTransitionTime":"2025-12-13T07:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.194269 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.194302 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.194310 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.194324 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.194333 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:26Z","lastTransitionTime":"2025-12-13T07:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.296911 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.296956 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.296972 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.296990 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.297001 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:26Z","lastTransitionTime":"2025-12-13T07:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.399195 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.399537 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.399632 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.399726 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.399811 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:26Z","lastTransitionTime":"2025-12-13T07:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.502152 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.502196 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.502207 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.502242 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.502253 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:26Z","lastTransitionTime":"2025-12-13T07:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.594539 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:26 crc kubenswrapper[4733]: E1213 07:05:26.594701 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.604688 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.604728 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.604739 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.604753 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.604762 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:26Z","lastTransitionTime":"2025-12-13T07:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.706853 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.707062 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.707129 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.707192 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.707282 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:26Z","lastTransitionTime":"2025-12-13T07:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.809014 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.809280 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.809349 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.809419 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.809518 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:26Z","lastTransitionTime":"2025-12-13T07:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.846733 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:26 crc kubenswrapper[4733]: E1213 07:05:26.846930 4733 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:05:26 crc kubenswrapper[4733]: E1213 07:05:26.847370 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs podName:868ca71b-6aae-4054-8b03-cf7c2b3499cd nodeName:}" failed. No retries permitted until 2025-12-13 07:05:58.847347416 +0000 UTC m=+103.458577899 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs") pod "network-metrics-daemon-r5dfg" (UID: "868ca71b-6aae-4054-8b03-cf7c2b3499cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.911865 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.911906 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.911918 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.911931 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:26 crc kubenswrapper[4733]: I1213 07:05:26.911941 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:26Z","lastTransitionTime":"2025-12-13T07:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.014534 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.014568 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.014576 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.014589 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.014598 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:27Z","lastTransitionTime":"2025-12-13T07:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.116536 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.116584 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.116597 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.116615 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.116629 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:27Z","lastTransitionTime":"2025-12-13T07:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.218986 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.219019 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.219028 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.219039 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.219049 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:27Z","lastTransitionTime":"2025-12-13T07:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.321867 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.321924 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.321937 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.321959 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.321972 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:27Z","lastTransitionTime":"2025-12-13T07:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.424360 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.424398 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.424409 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.424422 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.424430 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:27Z","lastTransitionTime":"2025-12-13T07:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.527445 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.527488 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.527540 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.527556 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.527568 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:27Z","lastTransitionTime":"2025-12-13T07:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.594908 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.594989 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:27 crc kubenswrapper[4733]: E1213 07:05:27.595158 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.595168 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:27 crc kubenswrapper[4733]: E1213 07:05:27.595270 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:27 crc kubenswrapper[4733]: E1213 07:05:27.595317 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.631902 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.631946 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.631959 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.631976 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.631989 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:27Z","lastTransitionTime":"2025-12-13T07:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.733663 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.733695 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.733703 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.733715 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.733723 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:27Z","lastTransitionTime":"2025-12-13T07:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.835765 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.835801 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.835817 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.835834 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.835846 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:27Z","lastTransitionTime":"2025-12-13T07:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.938232 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.938278 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.938290 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.938308 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:27 crc kubenswrapper[4733]: I1213 07:05:27.938321 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:27Z","lastTransitionTime":"2025-12-13T07:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.041164 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.041203 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.041285 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.041303 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.041317 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:28Z","lastTransitionTime":"2025-12-13T07:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.144098 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.144369 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.144510 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.144632 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.144737 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:28Z","lastTransitionTime":"2025-12-13T07:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.246999 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.247043 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.247055 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.247071 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.247083 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:28Z","lastTransitionTime":"2025-12-13T07:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.349755 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.349815 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.349832 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.349852 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.349864 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:28Z","lastTransitionTime":"2025-12-13T07:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.452471 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.452512 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.452521 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.452535 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.452546 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:28Z","lastTransitionTime":"2025-12-13T07:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.555561 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.555600 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.555612 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.555653 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.555663 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:28Z","lastTransitionTime":"2025-12-13T07:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.594242 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:28 crc kubenswrapper[4733]: E1213 07:05:28.594375 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.658319 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.658357 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.658367 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.658387 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.658398 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:28Z","lastTransitionTime":"2025-12-13T07:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.760436 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.760474 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.760485 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.760501 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.760515 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:28Z","lastTransitionTime":"2025-12-13T07:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.862793 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.862831 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.862842 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.862858 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.862913 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:28Z","lastTransitionTime":"2025-12-13T07:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.965353 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.965383 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.965393 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.965408 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:28 crc kubenswrapper[4733]: I1213 07:05:28.965419 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:28Z","lastTransitionTime":"2025-12-13T07:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.067426 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.067455 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.067465 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.067477 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.067486 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:29Z","lastTransitionTime":"2025-12-13T07:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.169870 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.169913 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.169928 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.169943 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.169954 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:29Z","lastTransitionTime":"2025-12-13T07:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.272359 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.272402 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.272413 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.272429 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.272444 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:29Z","lastTransitionTime":"2025-12-13T07:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.375081 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.375119 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.375130 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.375143 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.375152 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:29Z","lastTransitionTime":"2025-12-13T07:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.477174 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.477223 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.477232 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.477245 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.477254 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:29Z","lastTransitionTime":"2025-12-13T07:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.579129 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.579181 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.579191 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.579228 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.579241 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:29Z","lastTransitionTime":"2025-12-13T07:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.594509 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.594575 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:29 crc kubenswrapper[4733]: E1213 07:05:29.594641 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.594580 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:29 crc kubenswrapper[4733]: E1213 07:05:29.594711 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:29 crc kubenswrapper[4733]: E1213 07:05:29.594767 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.681703 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.681742 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.681751 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.681765 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.681777 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:29Z","lastTransitionTime":"2025-12-13T07:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.784454 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.784491 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.784501 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.784516 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.784527 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:29Z","lastTransitionTime":"2025-12-13T07:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.886507 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.886559 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.886571 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.886588 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.886597 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:29Z","lastTransitionTime":"2025-12-13T07:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.989018 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.989071 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.989084 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.989103 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:29 crc kubenswrapper[4733]: I1213 07:05:29.989116 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:29Z","lastTransitionTime":"2025-12-13T07:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.091644 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.091702 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.091719 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.091735 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.091747 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:30Z","lastTransitionTime":"2025-12-13T07:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.163839 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h7vxp_3662e179-d702-4e13-a972-a5b6ab155e2c/kube-multus/0.log" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.163889 4733 generic.go:334] "Generic (PLEG): container finished" podID="3662e179-d702-4e13-a972-a5b6ab155e2c" containerID="4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262" exitCode=1 Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.163924 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h7vxp" event={"ID":"3662e179-d702-4e13-a972-a5b6ab155e2c","Type":"ContainerDied","Data":"4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262"} Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.164338 4733 scope.go:117] "RemoveContainer" containerID="4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.180121 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\"
:true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPa
th\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.192039 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 
07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.194651 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.194682 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.194707 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.194720 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.194729 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:30Z","lastTransitionTime":"2025-12-13T07:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.202785 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdafad9b-003c-4e26-91ba-8593a8ec6252\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f82045ac8c1cab00b6d1480e0b62bf93bc7d879cf9ce653bae848f7aa326f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08a
af09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.214253 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4bfb53a0-3ab3-4782-a022-be1618e20511\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5e6fb86d3842b9b963b5e458dab0eaf384c9d125d476a522b7510eff6916446\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://322f172bd0b9c19683a7af586a7f33610a7e6126d82c73056af895601513ca0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a932f4e3e6fe5266401c35693e55baa63300c701741ea3d2848a33c9a7f762b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.226736 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.238279 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.251376 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:29Z\\\",\\\"message\\\":\\\"2025-12-13T07:04:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6\\\\n2025-12-13T07:04:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6 to /host/opt/cni/bin/\\\\n2025-12-13T07:04:43Z [verbose] multus-daemon started\\\\n2025-12-13T07:04:43Z [verbose] Readiness Indicator file check\\\\n2025-12-13T07:05:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.270141 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49
117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.281821 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 
07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.293403 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.297094 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.297143 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.297158 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.297176 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.297187 4733 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:30Z","lastTransitionTime":"2025-12-13T07:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.304611 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.321620 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:07Z\\\",\\\"message\\\":\\\"ices do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"3f1b9878-e751-4e46-a226-ce007d2c4aa7\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1213 07:05:07.317019 6385 model_cli\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:05:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.332822 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.344457 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.357694 4733 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c
39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.370023 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.381192 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.391780 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.398956 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.398999 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.399011 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.399027 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.399038 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:30Z","lastTransitionTime":"2025-12-13T07:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.400791 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:30Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.501999 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.502046 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.502056 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.502072 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.502081 4733 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:30Z","lastTransitionTime":"2025-12-13T07:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.593807 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:30 crc kubenswrapper[4733]: E1213 07:05:30.594195 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.604779 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.604831 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.604845 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.604864 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.604878 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:30Z","lastTransitionTime":"2025-12-13T07:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.707919 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.707960 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.707970 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.707983 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.707993 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:30Z","lastTransitionTime":"2025-12-13T07:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.810623 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.810662 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.810671 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.810686 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.810695 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:30Z","lastTransitionTime":"2025-12-13T07:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.913151 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.913199 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.913243 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.913262 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:30 crc kubenswrapper[4733]: I1213 07:05:30.913272 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:30Z","lastTransitionTime":"2025-12-13T07:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.016288 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.016330 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.016339 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.016359 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.016371 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:31Z","lastTransitionTime":"2025-12-13T07:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.118447 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.118504 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.118514 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.118529 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.118541 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:31Z","lastTransitionTime":"2025-12-13T07:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.168744 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h7vxp_3662e179-d702-4e13-a972-a5b6ab155e2c/kube-multus/0.log" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.168822 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h7vxp" event={"ID":"3662e179-d702-4e13-a972-a5b6ab155e2c","Type":"ContainerStarted","Data":"ea3a2b804721d628e8069b3b202b8f2e4cde15bd3f7721e323f5009e0cae45d4"} Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.186156 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdafad9b-003c-4e26-91ba-8593a8ec6252\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f82045ac8c1cab00b6d1480e0b62bf93bc7d879cf9ce653bae848f7aa326f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.202778 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4bfb53a0-3ab3-4782-a022-be1618e20511\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5e6fb86d3842b9b963b5e458dab0eaf384c9d125d476a522b7510eff6916446\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://322f172bd0b9c19683a7af586a7f33610a7e6126d82c73056af895601513ca0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a932f4e3e6fe5266401c35693e55baa63300c701741ea3d2848a33c9a7f762b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.221941 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.221986 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.221998 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.222014 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.222026 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:31Z","lastTransitionTime":"2025-12-13T07:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.228046 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.244082 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.256977 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3a2b804721d628e8069b3b202b8f2e4cde15bd3f7721e323f5009e0cae45d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:29Z\\\",\\\"message\\\":\\\"2025-12-13T07:04:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6\\\\n2025-12-13T07:04:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6 to /host/opt/cni/bin/\\\\n2025-12-13T07:04:43Z [verbose] multus-daemon started\\\\n2025-12-13T07:04:43Z [verbose] Readiness Indicator file check\\\\n2025-12-13T07:05:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.280334 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.292080 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.323199 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.324869 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.324911 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.324923 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.324941 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.324954 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:31Z","lastTransitionTime":"2025-12-13T07:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.343558 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.362000 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.379406 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.404644 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b33
4ab9b04eadb3cae9b6b4b24e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:07Z\\\",\\\"message\\\":\\\"ices do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"3f1b9878-e751-4e46-a226-ce007d2c4aa7\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1213 07:05:07.317019 6385 model_cli\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:05:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.417291 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.428670 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.428711 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.428721 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.428738 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.428749 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:31Z","lastTransitionTime":"2025-12-13T07:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.431863 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.447578 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.461506 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.474754 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.485715 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.496705 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:31Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.530746 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.530800 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.530814 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.530833 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.530846 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:31Z","lastTransitionTime":"2025-12-13T07:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.594062 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.594084 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.594179 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:31 crc kubenswrapper[4733]: E1213 07:05:31.594297 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:31 crc kubenswrapper[4733]: E1213 07:05:31.594375 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:31 crc kubenswrapper[4733]: E1213 07:05:31.594530 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.633656 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.633758 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.633776 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.633797 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.633810 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:31Z","lastTransitionTime":"2025-12-13T07:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.736853 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.736909 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.736920 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.736945 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.736960 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:31Z","lastTransitionTime":"2025-12-13T07:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.840233 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.840292 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.840303 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.840325 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.840338 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:31Z","lastTransitionTime":"2025-12-13T07:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.942483 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.942593 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.942606 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.942622 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:31 crc kubenswrapper[4733]: I1213 07:05:31.942634 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:31Z","lastTransitionTime":"2025-12-13T07:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.045046 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.045174 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.045242 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.045273 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.045285 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:32Z","lastTransitionTime":"2025-12-13T07:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.146997 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.147036 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.147044 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.147058 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.147067 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:32Z","lastTransitionTime":"2025-12-13T07:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.250248 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.250304 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.250316 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.250333 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.250342 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:32Z","lastTransitionTime":"2025-12-13T07:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.352769 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.352811 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.352822 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.352837 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.352848 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:32Z","lastTransitionTime":"2025-12-13T07:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.455362 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.455402 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.455412 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.455427 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.455435 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:32Z","lastTransitionTime":"2025-12-13T07:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.557943 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.557976 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.557987 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.558000 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.558008 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:32Z","lastTransitionTime":"2025-12-13T07:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.594592 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:32 crc kubenswrapper[4733]: E1213 07:05:32.594725 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.595348 4733 scope.go:117] "RemoveContainer" containerID="fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.660187 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.660505 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.660515 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.660528 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.660561 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:32Z","lastTransitionTime":"2025-12-13T07:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.763163 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.763247 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.763262 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.763278 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.763289 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:32Z","lastTransitionTime":"2025-12-13T07:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.865063 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.865102 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.865113 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.865126 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.865135 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:32Z","lastTransitionTime":"2025-12-13T07:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.967503 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.967554 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.967565 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.967582 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:32 crc kubenswrapper[4733]: I1213 07:05:32.967592 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:32Z","lastTransitionTime":"2025-12-13T07:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.069846 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.069877 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.069888 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.069902 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.069912 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:33Z","lastTransitionTime":"2025-12-13T07:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.172154 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.172197 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.172232 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.172252 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.172266 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:33Z","lastTransitionTime":"2025-12-13T07:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.174877 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/2.log" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.176848 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63"} Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.177298 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.197099 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.207113 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdafad9b-003c-4e26-91ba-8593a8ec6252\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f82045ac8c1cab00b6d1480e0b62bf93bc7d879cf9ce653bae848f7aa326f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.218827 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4bfb53a0-3ab3-4782-a022-be1618e20511\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5e6fb86d3842b9b963b5e458dab0eaf384c9d125d476a522b7510eff6916446\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://322f172bd0b9c19683a7af586a7f33610a7e6126d82c73056af895601513ca0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a932f4e3e6fe5266401c35693e55baa63300c701741ea3d2848a33c9a7f762b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.230985 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.241002 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.251866 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3a2b804721d628e8069b3b202b8f2e4cde15bd3f7721e323f5009e0cae45d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:29Z\\\",\\\"message\\\":\\\"2025-12-13T07:04:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6\\\\n2025-12-13T07:04:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6 to /host/opt/cni/bin/\\\\n2025-12-13T07:04:43Z [verbose] multus-daemon started\\\\n2025-12-13T07:04:43Z [verbose] Readiness Indicator file check\\\\n2025-12-13T07:05:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.264485 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.274547 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.274591 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:33 crc 
kubenswrapper[4733]: I1213 07:05:33.274602 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.274618 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.274628 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:33Z","lastTransitionTime":"2025-12-13T07:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.283718 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.294991 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 
07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.307045 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.317868 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.334278 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://863075cf095552dafa43c51ddc81e59501153452
104ce81f7c8ab745b9d26a63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:07Z\\\",\\\"message\\\":\\\"ices do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"3f1b9878-e751-4e46-a226-ce007d2c4aa7\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1213 07:05:07.317019 6385 
model_cli\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:05:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:05:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.344125 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.355555 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.369511 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.377041 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.377073 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.377085 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.377101 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.377113 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:33Z","lastTransitionTime":"2025-12-13T07:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.383237 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.394738 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.403718 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.413419 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:33Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.479323 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.479364 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.479372 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.479389 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.479400 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:33Z","lastTransitionTime":"2025-12-13T07:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.581400 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.581445 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.581455 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.581473 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.581485 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:33Z","lastTransitionTime":"2025-12-13T07:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.594404 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.594409 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:33 crc kubenswrapper[4733]: E1213 07:05:33.594547 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.594409 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:33 crc kubenswrapper[4733]: E1213 07:05:33.594648 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:33 crc kubenswrapper[4733]: E1213 07:05:33.594729 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.683335 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.683381 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.683391 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.683406 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.683416 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:33Z","lastTransitionTime":"2025-12-13T07:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.785432 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.785479 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.785494 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.785512 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.785525 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:33Z","lastTransitionTime":"2025-12-13T07:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.888184 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.888237 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.888247 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.888262 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.888272 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:33Z","lastTransitionTime":"2025-12-13T07:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.990050 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.990088 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.990097 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.990110 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:33 crc kubenswrapper[4733]: I1213 07:05:33.990119 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:33Z","lastTransitionTime":"2025-12-13T07:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.092342 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.092383 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.092393 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.092409 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.092420 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:34Z","lastTransitionTime":"2025-12-13T07:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.182874 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/3.log" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.183914 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/2.log" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.187328 4733 generic.go:334] "Generic (PLEG): container finished" podID="b6fa9235-730d-46d0-9211-563c96e073fb" containerID="863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63" exitCode=1 Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.187367 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerDied","Data":"863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63"} Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.187403 4733 scope.go:117] "RemoveContainer" containerID="fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.187970 4733 scope.go:117] "RemoveContainer" containerID="863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63" Dec 13 07:05:34 crc kubenswrapper[4733]: E1213 07:05:34.188160 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" podUID="b6fa9235-730d-46d0-9211-563c96e073fb" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.195174 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.195228 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.195241 4733 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.195260 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.195272 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:34Z","lastTransitionTime":"2025-12-13T07:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.209029 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://863075cf095552dafa43c51ddc81e59501153452
104ce81f7c8ab745b9d26a63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd20c2f2af11f5c61f0d243a1b663def955b8b334ab9b04eadb3cae9b6b4b24e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:07Z\\\",\\\"message\\\":\\\"ices do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"3f1b9878-e751-4e46-a226-ce007d2c4aa7\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1213 07:05:07.317019 6385 model_cli\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:05:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:34Z\\\",\\\"message\\\":\\\"ncerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1213 07:05:33.482933 6783 lb_config.go:1031] Cluster endpoints for openshift-cluster-version/cluster-version-operator for network=default are: map[]\\\\nI1213 07:05:33.482953 6783 services_controller.go:443] Built service openshift-cluster-version/cluster-version-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.182\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9099, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF1213 07:05:33.482960 6783 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to 
start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:05:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.221629 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.234672 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.245786 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.257853 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.269431 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.279019 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.290729 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.297150 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.297183 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.297192 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.297223 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.297232 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:34Z","lastTransitionTime":"2025-12-13T07:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.303064 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.318327 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.332735 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.344763 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.358493 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3a2b804721d628e8069b3b202b8f2e4cde15bd3f7721e323f5009e0cae45d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:29Z\\\",\\\"message\\\":\\\"2025-12-13T07:04:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6\\\\n2025-12-13T07:04:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6 to /host/opt/cni/bin/\\\\n2025-12-13T07:04:43Z [verbose] multus-daemon started\\\\n2025-12-13T07:04:43Z [verbose] Readiness Indicator file check\\\\n2025-12-13T07:05:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.371813 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.383235 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.393856 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdafad9b-003c-4e26-91ba-8593a8ec6252\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f82045ac8c1cab00b6d1480e0b62bf93bc7d879cf9ce653bae848f7aa326f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.400182 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.400255 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.400275 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.400296 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.400311 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:34Z","lastTransitionTime":"2025-12-13T07:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.404874 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4bfb53a0-3ab3-4782-a022-be1618e20511\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5e6fb86d3842b9b963b5e458dab0eaf384c9d125d476a522b7510eff6916446\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://322f172bd0b9c19683a7af586a7f33610a7e6126d82c73056af895601513ca0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"nam
e\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a932f4e3e6fe5266401c35693e55baa63300c701741ea3d2848a33c9a7f762b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.422038 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.432366 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:34Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.502557 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.502591 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.502599 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.502612 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.502621 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:34Z","lastTransitionTime":"2025-12-13T07:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.594546 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:34 crc kubenswrapper[4733]: E1213 07:05:34.594676 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.605244 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.605271 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.605282 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.605295 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.605306 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:34Z","lastTransitionTime":"2025-12-13T07:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.706905 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.706944 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.706952 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.706966 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.706976 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:34Z","lastTransitionTime":"2025-12-13T07:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.809383 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.809422 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.809434 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.809449 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.809460 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:34Z","lastTransitionTime":"2025-12-13T07:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.915690 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.915731 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.915740 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.915755 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:34 crc kubenswrapper[4733]: I1213 07:05:34.915766 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:34Z","lastTransitionTime":"2025-12-13T07:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.017799 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.017926 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.018226 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.018264 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.018277 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.120762 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.120808 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.120819 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.120834 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.120844 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.193422 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/3.log" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.198575 4733 scope.go:117] "RemoveContainer" containerID="863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63" Dec 13 07:05:35 crc kubenswrapper[4733]: E1213 07:05:35.198866 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" podUID="b6fa9235-730d-46d0-9211-563c96e073fb" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.212022 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.222983 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.223265 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.223317 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.223358 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.223376 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.223389 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.240672 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:34Z\\\",\\\"message\\\":\\\"ncerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1213 07:05:33.482933 6783 lb_config.go:1031] Cluster endpoints for openshift-cluster-version/cluster-version-operator for network=default are: map[]\\\\nI1213 07:05:33.482953 6783 services_controller.go:443] Built service openshift-cluster-version/cluster-version-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.182\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9099, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF1213 07:05:33.482960 6783 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:05:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.252080 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.264632 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.277608 4733 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c
39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.290607 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.304714 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.319583 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.326114 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.326177 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.326189 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.326204 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.326226 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.331269 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.348324 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.358928 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.370635 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdafad9b-003c-4e26-91ba-8593a8ec6252\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f82045ac8c1cab00b6d1480e0b62bf93bc7d879cf9ce653bae848f7aa326f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.383934 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4bfb53a0-3ab3-4782-a022-be1618e20511\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5e6fb86d3842b9b963b5e458dab0eaf384c9d125d476a522b7510eff6916446\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://322f172bd0b9c19683a7af586a7f33610a7e6126d82c73056af895601513ca0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a932f4e3e6fe5266401c35693e55baa63300c701741ea3d2848a33c9a7f762b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.397793 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.414383 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.429762 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.430037 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.430129 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.430236 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.430336 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.431670 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3a2b804721d628e8069b3b202b8f2e4cde15bd3f7721e323f5009e0cae45d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:29Z\\\",\\\"message\\\":\\\"2025-12-13T07:04:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6\\\\n2025-12-13T07:04:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6 to /host/opt/cni/bin/\\\\n2025-12-13T07:04:43Z [verbose] multus-daemon started\\\\n2025-12-13T07:04:43Z [verbose] Readiness Indicator file check\\\\n2025-12-13T07:05:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.452539 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac879
42c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.466680 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.473874 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.473904 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.473917 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.473932 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.473943 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: E1213 07:05:35.489254 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.493729 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.493906 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.493982 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.494091 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.494201 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: E1213 07:05:35.511789 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.517877 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.517921 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.517930 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.517946 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.517956 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: E1213 07:05:35.530657 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.534089 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.534122 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.534131 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.534146 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.534157 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: E1213 07:05:35.545549 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.549763 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.549814 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.549829 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.549848 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.549861 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: E1213 07:05:35.563819 4733 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"61aa24df-270f-41df-a3ba-89af404afd63\\\",\\\"systemUUID\\\":\\\"6c562c68-0dae-4032-a6e3-a1edf517d11e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: E1213 07:05:35.564362 4733 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.565894 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.565923 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.565931 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.565944 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.565953 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.594318 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.594359 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.594318 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:35 crc kubenswrapper[4733]: E1213 07:05:35.594457 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:35 crc kubenswrapper[4733]: E1213 07:05:35.594522 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:35 crc kubenswrapper[4733]: E1213 07:05:35.594631 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.615503 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3f7b2bcd-f629-464d-9ae5-b2f87f5a5bd0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b66eb1888e63fa5ea88db224947143ce7347e8d198b6e2344e448aa5b76025c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96010851fabb0e8ecad1a24159f8e8570b15215da94853a3522d9287fcaaec0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fd127e42217eea622b956254fd77309dc67982dc3b213cf90bed52e3601c1b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"
startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0895733b13d869e071a4bca3af2dc695fac87942c15697e468415cf955a62af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7239f9719230d88f07f964767bc7d905a700e4b0e84a5d9af6f65fa899e2926\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaf005d1aaa911a12357f35ab8fd691c2a7ddf3e640d51c19f2cf9b25912c44e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad0f6644c49567924ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad0f6644c4956792
4ef40bd2c87d898c970970b3d8d3a00d873aca4beb3ec351\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a04b20d36e07345aba5a70e4b642c9089a1f97d6876f0993307a9060a5d94dde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.627026 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6562fcf1-dd70-4d8c-954c-1e5775cdd5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c088339f948441b6c819e16fc60ee9d15948f0fbfcf1f320f91ad4d6ac905158\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://172322370cfd9802c314c1fb55491627857fa2a4013fdc33e7a551460ad2f62a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ssn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vwcvd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 
07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.640371 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17831d74-d1c0-4b8e-aa94-83f240384afb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1213 07:04:27.980017 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1213 07:04:27.981035 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1600924525/tls.crt::/tmp/serving-cert-1600924525/tls.key\\\\\\\"\\\\nI1213 07:04:33.558960 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1213 07:04:33.561669 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1213 07:04:33.561697 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1213 07:04:33.561997 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1213 07:04:33.562009 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1213 07:04:33.566591 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1213 07:04:33.566639 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566646 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1213 07:04:33.566654 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1213 07:04:33.566659 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1213 07:04:33.566665 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1213 07:04:33.566670 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1213 07:04:33.566849 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1213 07:04:33.569072 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.652100 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57705b08c5c22a8f6a746ea0d31f5553619b650a397895d4a78b1e1bdb619818\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.667525 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.667560 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.667571 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.667589 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.667601 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.669940 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6fa9235-730d-46d0-9211-563c96e073fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2f9ba8fca173a04c1da7b6f28602bd31be014bc6fd888ab88116475000f6c825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be40de56e56118bb562970c1b6056c9f59e881e5a8eafa09e26be828056456df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0c528006d1e2b3731b15ac7f0a7761340448e77783e4175be505ca740a9a5704\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8dbd419b1c3ae4122a0d55a98bbf202e9df065d78e4d9c006c5f54280c527bdf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48b4a8c4baf2f8e4acd207191c9c039f2a1848a94b17bbe98d7868275d0c6942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aefc703f56dfbd814bd4084c0871af2e3f73045aa73bc305590a5c8f13657d23\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:34Z\\\",\\\"message\\\":\\\"ncerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1213 07:05:33.482933 6783 lb_config.go:1031] Cluster endpoints for openshift-cluster-version/cluster-version-operator for network=default are: map[]\\\\nI1213 07:05:33.482953 6783 services_controller.go:443] Built service openshift-cluster-version/cluster-version-operator LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.4.182\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:9099, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nF1213 07:05:33.482960 6783 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:05:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040b616e16be92a019922cd3a07451ff9564d521084f097df331214c8c51ec64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f75336bb690d28768a6f0c051b7eceb4f6c4433b276e2f4e3dd3a34e30fc7c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jsrwv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vsc8k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.679879 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"868ca71b-6aae-4054-8b03-cf7c2b3499cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xdqx9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:55Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-r5dfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.689689 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mn7vx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"abd0f65b-5959-4a7c-b89d-d22d56c6b6fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e731a8f1e50c06a465397eba94afe45bcdf78a63447b9e049f273ed36cdcd35b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqvlx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mn7vx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.702512 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa175c9e-efff-4835-8b54-c98307e46f0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cdde607948aa842df3a31e586b9a1a0f08f3f7df3774db3331d3a3068c4074e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cgfbq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.713452 4733 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"603bcb20-a0d9-437f-b7b5-e2628c8f3269\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb28933e2586809a738e2d5a9217bce1f914140e52b66571b74d280bd1b9fa2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b963ca77661ab9a75e078582e1fd0ab97241d9bc061871d523265ae24dc9bcb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b20626b30175339bc0f6540e2c
39df74c5b8a81ec3516c644485bcf2592ec21\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.725589 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a862e7357ea17482e789ef621fc530d19d1bb8be3312131436c4be86fb769fdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.738041 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.748603 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.759587 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-h7vxp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3662e179-d702-4e13-a972-a5b6ab155e2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ea3a2b804721d628e8069b3b202b8f2e4cde15bd3f7721e323f5009e0cae45d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-13T07:05:29Z\\\",\\\"message\\\":\\\"2025-12-13T07:04:43+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to 
/host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6\\\\n2025-12-13T07:04:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b9162649-ace7-4753-9928-f9457e92f4c6 to /host/opt/cni/bin/\\\\n2025-12-13T07:04:43Z [verbose] multus-daemon started\\\\n2025-12-13T07:04:43Z [verbose] Readiness Indicator file check\\\\n2025-12-13T07:05:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jw9l4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-h7vxp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.772439 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.772511 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.772522 4733 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.772541 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.772595 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.772828 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v25b7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8f3658c4-6596-4806-936b-1367da3bfb0e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d5b849666e5c24386d5960a26e77d0fe44bea2ea81b2559426c6c1a3c3f8013\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0aa3342644b36b9b3a117fe26220733129b14e6e90a485f91f913b6d72a78aa5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:
04:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fa56c4a561b734b3704c2bf007ca4dc2ef072725fd9ca24868bd65274570c82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23d08a89e4b7ccecb48865fa681a097a2712880ee2ec618313305376d0965be0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de
0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75c8813b8f41cd7aada9e3dd20d38314397cc22b5793e234496262a2f7c3b6f9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f058824d4f238daca8ea655a5347e3beb3409656e26569ac0a459615f80df469\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc1c655ad4030ecd6ab9474c022f4ed959b1fda6452573027a8bb6e83dd6dacc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9lfj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],
\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:41Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v25b7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.786768 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4pmlw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2e3d2e4-4723-4ec9-bf01-ac029216fa91\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5be23d41d075351bbf396bd5cf684094409cb151971a2b8e379e3b849585208d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qr26g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4pmlw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.797003 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdafad9b-003c-4e26-91ba-8593a8ec6252\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f82045ac8c1cab00b6d1480e0b62bf93bc7d879cf9ce653bae848f7aa326f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b3e5f3fb865508e465ccd37086390bf7271e22c5d1c89e405392f1904283a33b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.806659 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4bfb53a0-3ab3-4782-a022-be1618e20511\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:05:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a5e6fb86d3842b9b963b5e458dab0eaf384c9d125d476a522b7510eff6916446\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://322f172bd0b9c19683a7af586a7f33610a7e6126d82c73056af895601513ca0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a932f4e3e6fe5266401c35693e55baa63300c701741ea3d2848a33c9a7f762b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b819b76561b4d4f964b31a3e9c54686189e5f866380f4911385ce168d8ab0975\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-13T07:04:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-13T07:04:16Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-13T07:04:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.817055 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://065d12b49fca7cd64511b08a07c19bb5a0535d7c2bdf7e7250f4d31f3a9684d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f34f414f10a9a377b8de97f246d5363ec01dad6d47bc8f83dc09f6e8a019e966\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-13T07:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.827748 4733 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-13T07:04:33Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-13T07:05:35Z is after 2025-08-24T17:21:41Z" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.874646 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.874710 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.874734 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.874765 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.874789 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.977915 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.977959 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.977969 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.977986 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:35 crc kubenswrapper[4733]: I1213 07:05:35.977996 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:35Z","lastTransitionTime":"2025-12-13T07:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.080458 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.080485 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.080495 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.080507 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.080515 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:36Z","lastTransitionTime":"2025-12-13T07:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.183448 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.183494 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.183505 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.183523 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.183535 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:36Z","lastTransitionTime":"2025-12-13T07:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.285736 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.285768 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.285776 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.285791 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.285809 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:36Z","lastTransitionTime":"2025-12-13T07:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.388336 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.388373 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.388382 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.388397 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.388408 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:36Z","lastTransitionTime":"2025-12-13T07:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.490512 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.490553 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.490562 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.490575 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.490585 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:36Z","lastTransitionTime":"2025-12-13T07:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.592936 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.593050 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.593066 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.593086 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.593097 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:36Z","lastTransitionTime":"2025-12-13T07:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.593824 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:36 crc kubenswrapper[4733]: E1213 07:05:36.593955 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.695095 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.695129 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.695140 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.695155 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.695166 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:36Z","lastTransitionTime":"2025-12-13T07:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.797984 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.798020 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.798028 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.798041 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.798053 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:36Z","lastTransitionTime":"2025-12-13T07:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.900335 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.900381 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.900399 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.900419 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:36 crc kubenswrapper[4733]: I1213 07:05:36.900431 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:36Z","lastTransitionTime":"2025-12-13T07:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.002996 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.003032 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.003042 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.003056 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.003068 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:37Z","lastTransitionTime":"2025-12-13T07:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.105417 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.105464 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.105475 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.105489 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.105500 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:37Z","lastTransitionTime":"2025-12-13T07:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.206909 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.206941 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.206949 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.206962 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.206971 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:37Z","lastTransitionTime":"2025-12-13T07:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.309960 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.310021 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.310039 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.310061 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.310082 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:37Z","lastTransitionTime":"2025-12-13T07:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.412987 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.413048 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.413069 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.413095 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.413118 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:37Z","lastTransitionTime":"2025-12-13T07:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.516068 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.516109 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.516122 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.516138 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.516150 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:37Z","lastTransitionTime":"2025-12-13T07:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.594527 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.594604 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.594653 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.594731 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.594802 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.594853 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.617968 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.618031 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.618042 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.618078 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.618090 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:37Z","lastTransitionTime":"2025-12-13T07:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.720663 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.720723 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.720733 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.720750 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.720759 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:37Z","lastTransitionTime":"2025-12-13T07:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.754561 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.754676 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.754734 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.754699329 +0000 UTC m=+146.365929852 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.754784 4733 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.754795 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.754834 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.754820692 +0000 UTC m=+146.366051175 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.754979 4733 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.755093 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.755065059 +0000 UTC m=+146.366295602 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.823188 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.823255 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.823268 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.823285 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.823314 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:37Z","lastTransitionTime":"2025-12-13T07:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.855679 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.855714 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.855840 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.855848 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.855855 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.855863 4733 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.855868 4733 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.855873 4733 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.855913 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.855901145 +0000 UTC m=+146.467131618 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:05:37 crc kubenswrapper[4733]: E1213 07:05:37.855925 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.855920746 +0000 UTC m=+146.467151229 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.925721 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.925788 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.925801 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.925818 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:37 crc kubenswrapper[4733]: I1213 07:05:37.925852 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:37Z","lastTransitionTime":"2025-12-13T07:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.028404 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.028437 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.028447 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.028463 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.028473 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:38Z","lastTransitionTime":"2025-12-13T07:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.131155 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.131196 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.131238 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.131259 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.131269 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:38Z","lastTransitionTime":"2025-12-13T07:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.233445 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.233484 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.233498 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.233511 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.233519 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:38Z","lastTransitionTime":"2025-12-13T07:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.336797 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.336835 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.336844 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.336860 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.336871 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:38Z","lastTransitionTime":"2025-12-13T07:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.440000 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.440050 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.440066 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.440085 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.440099 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:38Z","lastTransitionTime":"2025-12-13T07:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.541840 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.541897 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.541909 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.541930 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.541941 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:38Z","lastTransitionTime":"2025-12-13T07:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.594521 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:38 crc kubenswrapper[4733]: E1213 07:05:38.594672 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.644710 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.644747 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.644755 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.644769 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.644812 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:38Z","lastTransitionTime":"2025-12-13T07:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.747249 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.747349 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.747378 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.747395 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.747436 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:38Z","lastTransitionTime":"2025-12-13T07:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.850306 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.850382 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.850400 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.850447 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.850466 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:38Z","lastTransitionTime":"2025-12-13T07:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.953116 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.953182 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.953202 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.953264 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:38 crc kubenswrapper[4733]: I1213 07:05:38.953282 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:38Z","lastTransitionTime":"2025-12-13T07:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.057310 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.057382 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.057403 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.057427 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.057446 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:39Z","lastTransitionTime":"2025-12-13T07:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.160739 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.160906 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.160929 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.160994 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.161015 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:39Z","lastTransitionTime":"2025-12-13T07:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.264353 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.264423 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.264445 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.264471 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.264492 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:39Z","lastTransitionTime":"2025-12-13T07:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.367096 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.367152 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.367163 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.367178 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.367188 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:39Z","lastTransitionTime":"2025-12-13T07:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.470365 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.470409 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.470421 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.470437 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.470447 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:39Z","lastTransitionTime":"2025-12-13T07:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.572557 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.572600 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.572609 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.572625 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.572637 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:39Z","lastTransitionTime":"2025-12-13T07:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.594336 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.594402 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:39 crc kubenswrapper[4733]: E1213 07:05:39.594470 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.594543 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:39 crc kubenswrapper[4733]: E1213 07:05:39.594691 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:39 crc kubenswrapper[4733]: E1213 07:05:39.594753 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.675842 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.675915 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.675938 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.675966 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.675991 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:39Z","lastTransitionTime":"2025-12-13T07:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.778928 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.778989 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.779005 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.779027 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.779044 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:39Z","lastTransitionTime":"2025-12-13T07:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.880736 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.880787 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.880800 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.880818 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.880830 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:39Z","lastTransitionTime":"2025-12-13T07:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.982327 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.982379 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.982396 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.982418 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:39 crc kubenswrapper[4733]: I1213 07:05:39.982433 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:39Z","lastTransitionTime":"2025-12-13T07:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.084745 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.084807 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.084821 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.084841 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.084853 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:40Z","lastTransitionTime":"2025-12-13T07:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.187137 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.187178 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.187186 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.187199 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.187232 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:40Z","lastTransitionTime":"2025-12-13T07:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.289847 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.289893 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.289905 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.289923 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.289936 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:40Z","lastTransitionTime":"2025-12-13T07:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.392266 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.392315 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.392329 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.392352 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.392367 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:40Z","lastTransitionTime":"2025-12-13T07:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.494760 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.494795 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.494803 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.494818 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.494837 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:40Z","lastTransitionTime":"2025-12-13T07:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.594424 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:40 crc kubenswrapper[4733]: E1213 07:05:40.594552 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.597181 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.597261 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.597272 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.597290 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.597300 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:40Z","lastTransitionTime":"2025-12-13T07:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.699807 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.699867 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.699878 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.699894 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.699905 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:40Z","lastTransitionTime":"2025-12-13T07:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.802096 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.802136 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.802145 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.802159 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.802168 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:40Z","lastTransitionTime":"2025-12-13T07:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.903976 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.904020 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.904042 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.904068 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:40 crc kubenswrapper[4733]: I1213 07:05:40.904080 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:40Z","lastTransitionTime":"2025-12-13T07:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.005815 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.005845 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.005853 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.005867 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.005876 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:41Z","lastTransitionTime":"2025-12-13T07:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.108594 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.108640 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.108650 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.108666 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.108678 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:41Z","lastTransitionTime":"2025-12-13T07:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.212289 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.212322 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.212331 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.212346 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.212355 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:41Z","lastTransitionTime":"2025-12-13T07:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.314652 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.314685 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.314694 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.314709 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.314720 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:41Z","lastTransitionTime":"2025-12-13T07:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.418313 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.418350 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.418358 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.418375 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.418384 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:41Z","lastTransitionTime":"2025-12-13T07:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.521012 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.521050 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.521094 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.521113 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.521121 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:41Z","lastTransitionTime":"2025-12-13T07:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.594416 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.594474 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:41 crc kubenswrapper[4733]: E1213 07:05:41.594559 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.594618 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:41 crc kubenswrapper[4733]: E1213 07:05:41.594805 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:41 crc kubenswrapper[4733]: E1213 07:05:41.595013 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.624503 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.624534 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.624542 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.624555 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.624565 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:41Z","lastTransitionTime":"2025-12-13T07:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.727048 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.727092 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.727108 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.727124 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.727160 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:41Z","lastTransitionTime":"2025-12-13T07:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.829574 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.829606 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.829616 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.829630 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.829638 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:41Z","lastTransitionTime":"2025-12-13T07:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.933015 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.933097 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.933118 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.933141 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:41 crc kubenswrapper[4733]: I1213 07:05:41.933157 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:41Z","lastTransitionTime":"2025-12-13T07:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.035903 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.035936 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.035945 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.035960 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.035968 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:42Z","lastTransitionTime":"2025-12-13T07:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.138971 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.139029 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.139046 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.139070 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.139086 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:42Z","lastTransitionTime":"2025-12-13T07:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.242200 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.242275 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.242288 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.242306 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.242318 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:42Z","lastTransitionTime":"2025-12-13T07:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.344863 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.344921 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.344938 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.344961 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.344978 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:42Z","lastTransitionTime":"2025-12-13T07:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.446963 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.447001 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.447014 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.447032 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.447044 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:42Z","lastTransitionTime":"2025-12-13T07:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.549646 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.549687 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.549698 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.549716 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.549729 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:42Z","lastTransitionTime":"2025-12-13T07:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.594057 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:42 crc kubenswrapper[4733]: E1213 07:05:42.594355 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.652690 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.652740 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.652753 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.652771 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.652785 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:42Z","lastTransitionTime":"2025-12-13T07:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.755690 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.755735 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.755745 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.755762 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.755798 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:42Z","lastTransitionTime":"2025-12-13T07:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.859141 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.859204 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.859241 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.859263 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.859280 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:42Z","lastTransitionTime":"2025-12-13T07:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.961946 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.962026 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.962041 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.962059 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:42 crc kubenswrapper[4733]: I1213 07:05:42.962072 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:42Z","lastTransitionTime":"2025-12-13T07:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.064958 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.065002 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.065014 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.065033 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.065045 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:43Z","lastTransitionTime":"2025-12-13T07:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.171713 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.171798 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.171809 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.171825 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.171838 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:43Z","lastTransitionTime":"2025-12-13T07:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.263589 4733 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 1.454862188s: [/var/lib/containers/storage/overlay/0f8427800a5d4137909c2b071777dc68021a733b5373c09d36c6dc9577ef76ce/diff /var/log/pods/openshift-dns_node-resolver-mn7vx_abd0f65b-5959-4a7c-b89d-d22d56c6b6fc/dns-node-resolver/0.log]; will not log again for this container unless duration exceeds 2s Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.275128 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.275252 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.275282 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.275312 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.275335 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:43Z","lastTransitionTime":"2025-12-13T07:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.377397 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.377444 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.377455 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.377474 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.377485 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:43Z","lastTransitionTime":"2025-12-13T07:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.480422 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.480476 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.480489 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.480506 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.480517 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:43Z","lastTransitionTime":"2025-12-13T07:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.583126 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.583168 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.583177 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.583193 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.583224 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:43Z","lastTransitionTime":"2025-12-13T07:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.594389 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.594476 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:43 crc kubenswrapper[4733]: E1213 07:05:43.594543 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.594708 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:43 crc kubenswrapper[4733]: E1213 07:05:43.594848 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:43 crc kubenswrapper[4733]: E1213 07:05:43.594976 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.685223 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.685253 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.685261 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.685274 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.685282 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:43Z","lastTransitionTime":"2025-12-13T07:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.787696 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.787727 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.787736 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.787749 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.787757 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:43Z","lastTransitionTime":"2025-12-13T07:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.889714 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.889760 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.889779 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.889797 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.889808 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:43Z","lastTransitionTime":"2025-12-13T07:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.991807 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.991852 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.991863 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.991878 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:43 crc kubenswrapper[4733]: I1213 07:05:43.991892 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:43Z","lastTransitionTime":"2025-12-13T07:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.094154 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.094249 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.094263 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.094278 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.094290 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:44Z","lastTransitionTime":"2025-12-13T07:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.199572 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.199630 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.199644 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.199666 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.199686 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:44Z","lastTransitionTime":"2025-12-13T07:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.302153 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.302247 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.302303 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.302334 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.302357 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:44Z","lastTransitionTime":"2025-12-13T07:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.404767 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.404840 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.404855 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.404873 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.404887 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:44Z","lastTransitionTime":"2025-12-13T07:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.508309 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.508357 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.508370 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.508389 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.508401 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:44Z","lastTransitionTime":"2025-12-13T07:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.593743 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:44 crc kubenswrapper[4733]: E1213 07:05:44.593984 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.610448 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.610490 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.610501 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.610517 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.610526 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:44Z","lastTransitionTime":"2025-12-13T07:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.714366 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.714463 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.714479 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.714505 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.714523 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:44Z","lastTransitionTime":"2025-12-13T07:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.817240 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.817285 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.817296 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.817311 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.817320 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:44Z","lastTransitionTime":"2025-12-13T07:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.919654 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.919698 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.919707 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.919722 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:44 crc kubenswrapper[4733]: I1213 07:05:44.919731 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:44Z","lastTransitionTime":"2025-12-13T07:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.022023 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.022084 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.022097 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.022117 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.022135 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:45Z","lastTransitionTime":"2025-12-13T07:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.125061 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.125107 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.125120 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.125135 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.125146 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:45Z","lastTransitionTime":"2025-12-13T07:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.226811 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.226866 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.226883 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.226906 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.226919 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:45Z","lastTransitionTime":"2025-12-13T07:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.329390 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.329455 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.329470 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.329491 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.329504 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:45Z","lastTransitionTime":"2025-12-13T07:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.432072 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.432111 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.432123 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.432137 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.432148 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:45Z","lastTransitionTime":"2025-12-13T07:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.534767 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.534806 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.534818 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.534832 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.534843 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:45Z","lastTransitionTime":"2025-12-13T07:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.593664 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.593716 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:45 crc kubenswrapper[4733]: E1213 07:05:45.593789 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.593664 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:45 crc kubenswrapper[4733]: E1213 07:05:45.593953 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:45 crc kubenswrapper[4733]: E1213 07:05:45.594073 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.615096 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.615164 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.615176 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.615191 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.615201 4733 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-13T07:05:45Z","lastTransitionTime":"2025-12-13T07:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.631631 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=72.63159712 podStartE2EDuration="1m12.63159712s" podCreationTimestamp="2025-12-13 07:04:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:45.619315282 +0000 UTC m=+90.230545785" watchObservedRunningTime="2025-12-13 07:05:45.63159712 +0000 UTC m=+90.242827603" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.663103 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck"] Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.663528 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.665569 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.665649 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.666072 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.666916 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.693745 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podStartSLOduration=65.693722858 podStartE2EDuration="1m5.693722858s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:45.693356788 +0000 UTC m=+90.304587301" watchObservedRunningTime="2025-12-13 07:05:45.693722858 +0000 UTC m=+90.304953341" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.708203 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=66.708177169 podStartE2EDuration="1m6.708177169s" podCreationTimestamp="2025-12-13 07:04:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:45.708075486 +0000 UTC m=+90.319305979" watchObservedRunningTime="2025-12-13 07:05:45.708177169 +0000 UTC m=+90.319407642" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.756387 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3fee1560-d157-4574-aa0e-2765597ab882-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.756641 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3fee1560-d157-4574-aa0e-2765597ab882-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.756723 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3fee1560-d157-4574-aa0e-2765597ab882-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.756820 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fee1560-d157-4574-aa0e-2765597ab882-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.756912 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fee1560-d157-4574-aa0e-2765597ab882-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.783509 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-mn7vx" podStartSLOduration=65.783491082 podStartE2EDuration="1m5.783491082s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:45.766478097 +0000 UTC m=+90.377708610" watchObservedRunningTime="2025-12-13 07:05:45.783491082 +0000 UTC m=+90.394721565" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.794929 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-v25b7" podStartSLOduration=65.794905664 podStartE2EDuration="1m5.794905664s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:45.783866603 +0000 UTC m=+90.395097096" watchObservedRunningTime="2025-12-13 07:05:45.794905664 +0000 UTC m=+90.406136147" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.810980 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-4pmlw" podStartSLOduration=65.810962832 podStartE2EDuration="1m5.810962832s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:45.795673697 +0000 UTC m=+90.406904180" watchObservedRunningTime="2025-12-13 07:05:45.810962832 +0000 UTC m=+90.422193315" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.811168 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=21.811163178 podStartE2EDuration="21.811163178s" podCreationTimestamp="2025-12-13 07:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:45.810191019 +0000 UTC m=+90.421421502" watchObservedRunningTime="2025-12-13 07:05:45.811163178 +0000 UTC m=+90.422393661" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.822917 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=37.822899319 podStartE2EDuration="37.822899319s" podCreationTimestamp="2025-12-13 07:05:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:45.822460077 +0000 UTC m=+90.433690560" 
watchObservedRunningTime="2025-12-13 07:05:45.822899319 +0000 UTC m=+90.434129802" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.857437 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3fee1560-d157-4574-aa0e-2765597ab882-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.857726 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fee1560-d157-4574-aa0e-2765597ab882-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.857853 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fee1560-d157-4574-aa0e-2765597ab882-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.857948 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3fee1560-d157-4574-aa0e-2765597ab882-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.858059 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3fee1560-d157-4574-aa0e-2765597ab882-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.857547 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3fee1560-d157-4574-aa0e-2765597ab882-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.858578 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3fee1560-d157-4574-aa0e-2765597ab882-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.859313 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3fee1560-d157-4574-aa0e-2765597ab882-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: 
I1213 07:05:45.863467 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fee1560-d157-4574-aa0e-2765597ab882-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.875616 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fee1560-d157-4574-aa0e-2765597ab882-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7h4ck\" (UID: \"3fee1560-d157-4574-aa0e-2765597ab882\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.895249 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=72.895225175 podStartE2EDuration="1m12.895225175s" podCreationTimestamp="2025-12-13 07:04:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:45.893915587 +0000 UTC m=+90.505146070" watchObservedRunningTime="2025-12-13 07:05:45.895225175 +0000 UTC m=+90.506455658" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.895637 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-h7vxp" podStartSLOduration=65.895630527 podStartE2EDuration="1m5.895630527s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:45.871490124 +0000 UTC m=+90.482720617" watchObservedRunningTime="2025-12-13 07:05:45.895630527 +0000 UTC m=+90.506861010" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.907293 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vwcvd" podStartSLOduration=64.907265336 podStartE2EDuration="1m4.907265336s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:45.906504744 +0000 UTC m=+90.517735227" watchObservedRunningTime="2025-12-13 07:05:45.907265336 +0000 UTC m=+90.518495829" Dec 13 07:05:45 crc kubenswrapper[4733]: I1213 07:05:45.976593 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" Dec 13 07:05:46 crc kubenswrapper[4733]: I1213 07:05:46.231066 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" event={"ID":"3fee1560-d157-4574-aa0e-2765597ab882","Type":"ContainerStarted","Data":"3a7fb5a82de40165cc0da1002e22aa4177cae656749c6aed2ba927f932a53827"} Dec 13 07:05:46 crc kubenswrapper[4733]: I1213 07:05:46.594278 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:46 crc kubenswrapper[4733]: E1213 07:05:46.594429 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:46 crc kubenswrapper[4733]: I1213 07:05:46.595808 4733 scope.go:117] "RemoveContainer" containerID="863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63" Dec 13 07:05:46 crc kubenswrapper[4733]: E1213 07:05:46.596087 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" podUID="b6fa9235-730d-46d0-9211-563c96e073fb" Dec 13 07:05:47 crc kubenswrapper[4733]: I1213 07:05:47.594128 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:47 crc kubenswrapper[4733]: I1213 07:05:47.594160 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:47 crc kubenswrapper[4733]: I1213 07:05:47.594196 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:47 crc kubenswrapper[4733]: E1213 07:05:47.594286 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:47 crc kubenswrapper[4733]: E1213 07:05:47.594425 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:47 crc kubenswrapper[4733]: E1213 07:05:47.594562 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:48 crc kubenswrapper[4733]: I1213 07:05:48.238266 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" event={"ID":"3fee1560-d157-4574-aa0e-2765597ab882","Type":"ContainerStarted","Data":"54607d3bd0a9b2a3d58c8c2ae0be760fc9665d6c5b698f75f9168f91dbd78808"} Dec 13 07:05:48 crc kubenswrapper[4733]: I1213 07:05:48.251510 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7h4ck" podStartSLOduration=68.25149471 podStartE2EDuration="1m8.25149471s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:05:48.251096458 +0000 UTC m=+92.862326941" watchObservedRunningTime="2025-12-13 07:05:48.25149471 +0000 UTC m=+92.862725193" Dec 13 07:05:48 crc kubenswrapper[4733]: I1213 07:05:48.594021 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:48 crc kubenswrapper[4733]: E1213 07:05:48.594760 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:49 crc kubenswrapper[4733]: I1213 07:05:49.594098 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:49 crc kubenswrapper[4733]: I1213 07:05:49.594154 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:49 crc kubenswrapper[4733]: E1213 07:05:49.594300 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:49 crc kubenswrapper[4733]: I1213 07:05:49.594505 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:49 crc kubenswrapper[4733]: E1213 07:05:49.594753 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:49 crc kubenswrapper[4733]: E1213 07:05:49.594927 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:50 crc kubenswrapper[4733]: I1213 07:05:50.594564 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:50 crc kubenswrapper[4733]: E1213 07:05:50.594862 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:51 crc kubenswrapper[4733]: I1213 07:05:51.594658 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:51 crc kubenswrapper[4733]: I1213 07:05:51.594688 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:51 crc kubenswrapper[4733]: I1213 07:05:51.594693 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:51 crc kubenswrapper[4733]: E1213 07:05:51.594805 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:51 crc kubenswrapper[4733]: E1213 07:05:51.594870 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:51 crc kubenswrapper[4733]: E1213 07:05:51.594940 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:52 crc kubenswrapper[4733]: I1213 07:05:52.594514 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:52 crc kubenswrapper[4733]: E1213 07:05:52.594786 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:53 crc kubenswrapper[4733]: I1213 07:05:53.593785 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:53 crc kubenswrapper[4733]: I1213 07:05:53.593916 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:53 crc kubenswrapper[4733]: I1213 07:05:53.593792 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:53 crc kubenswrapper[4733]: E1213 07:05:53.594044 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:53 crc kubenswrapper[4733]: E1213 07:05:53.594148 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:53 crc kubenswrapper[4733]: E1213 07:05:53.594318 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:54 crc kubenswrapper[4733]: I1213 07:05:54.594413 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:54 crc kubenswrapper[4733]: E1213 07:05:54.594563 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:55 crc kubenswrapper[4733]: I1213 07:05:55.594523 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:55 crc kubenswrapper[4733]: I1213 07:05:55.594594 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:55 crc kubenswrapper[4733]: I1213 07:05:55.594661 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:55 crc kubenswrapper[4733]: E1213 07:05:55.595812 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:55 crc kubenswrapper[4733]: E1213 07:05:55.595915 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:55 crc kubenswrapper[4733]: E1213 07:05:55.596122 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:56 crc kubenswrapper[4733]: I1213 07:05:56.594022 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:56 crc kubenswrapper[4733]: E1213 07:05:56.594163 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:57 crc kubenswrapper[4733]: I1213 07:05:57.593966 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:57 crc kubenswrapper[4733]: I1213 07:05:57.594030 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:57 crc kubenswrapper[4733]: I1213 07:05:57.594051 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:57 crc kubenswrapper[4733]: E1213 07:05:57.594137 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:57 crc kubenswrapper[4733]: E1213 07:05:57.594277 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:57 crc kubenswrapper[4733]: E1213 07:05:57.594449 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:05:58 crc kubenswrapper[4733]: I1213 07:05:58.593865 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:58 crc kubenswrapper[4733]: E1213 07:05:58.594010 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:05:58 crc kubenswrapper[4733]: I1213 07:05:58.889671 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:05:58 crc kubenswrapper[4733]: E1213 07:05:58.889842 4733 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:05:58 crc kubenswrapper[4733]: E1213 07:05:58.889952 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs podName:868ca71b-6aae-4054-8b03-cf7c2b3499cd nodeName:}" failed. No retries permitted until 2025-12-13 07:07:02.889931705 +0000 UTC m=+167.501162188 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs") pod "network-metrics-daemon-r5dfg" (UID: "868ca71b-6aae-4054-8b03-cf7c2b3499cd") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 13 07:05:59 crc kubenswrapper[4733]: I1213 07:05:59.594111 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:05:59 crc kubenswrapper[4733]: I1213 07:05:59.594231 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:05:59 crc kubenswrapper[4733]: I1213 07:05:59.594262 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:05:59 crc kubenswrapper[4733]: E1213 07:05:59.594343 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:05:59 crc kubenswrapper[4733]: E1213 07:05:59.594492 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:05:59 crc kubenswrapper[4733]: E1213 07:05:59.594606 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:00 crc kubenswrapper[4733]: I1213 07:06:00.594013 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:00 crc kubenswrapper[4733]: E1213 07:06:00.594148 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:01 crc kubenswrapper[4733]: I1213 07:06:01.594671 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:01 crc kubenswrapper[4733]: I1213 07:06:01.594734 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:01 crc kubenswrapper[4733]: E1213 07:06:01.594850 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:01 crc kubenswrapper[4733]: I1213 07:06:01.594882 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:01 crc kubenswrapper[4733]: E1213 07:06:01.595315 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:01 crc kubenswrapper[4733]: E1213 07:06:01.595442 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:01 crc kubenswrapper[4733]: I1213 07:06:01.595596 4733 scope.go:117] "RemoveContainer" containerID="863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63" Dec 13 07:06:01 crc kubenswrapper[4733]: E1213 07:06:01.595723 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" podUID="b6fa9235-730d-46d0-9211-563c96e073fb" Dec 13 07:06:02 crc kubenswrapper[4733]: I1213 07:06:02.593777 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:02 crc kubenswrapper[4733]: E1213 07:06:02.593945 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:03 crc kubenswrapper[4733]: I1213 07:06:03.594328 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:03 crc kubenswrapper[4733]: E1213 07:06:03.594439 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:03 crc kubenswrapper[4733]: I1213 07:06:03.594589 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:03 crc kubenswrapper[4733]: E1213 07:06:03.595450 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:03 crc kubenswrapper[4733]: I1213 07:06:03.596524 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:03 crc kubenswrapper[4733]: E1213 07:06:03.596663 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:04 crc kubenswrapper[4733]: I1213 07:06:04.594060 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:04 crc kubenswrapper[4733]: E1213 07:06:04.594201 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:05 crc kubenswrapper[4733]: I1213 07:06:05.593921 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:05 crc kubenswrapper[4733]: E1213 07:06:05.594932 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:05 crc kubenswrapper[4733]: I1213 07:06:05.595001 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:05 crc kubenswrapper[4733]: E1213 07:06:05.595154 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:05 crc kubenswrapper[4733]: I1213 07:06:05.595001 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:05 crc kubenswrapper[4733]: E1213 07:06:05.595274 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:06 crc kubenswrapper[4733]: I1213 07:06:06.594548 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:06 crc kubenswrapper[4733]: E1213 07:06:06.594702 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:07 crc kubenswrapper[4733]: I1213 07:06:07.594235 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:07 crc kubenswrapper[4733]: I1213 07:06:07.594268 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:07 crc kubenswrapper[4733]: I1213 07:06:07.594408 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:07 crc kubenswrapper[4733]: E1213 07:06:07.594459 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:07 crc kubenswrapper[4733]: E1213 07:06:07.594546 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:07 crc kubenswrapper[4733]: E1213 07:06:07.594653 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:08 crc kubenswrapper[4733]: I1213 07:06:08.594459 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:08 crc kubenswrapper[4733]: E1213 07:06:08.594597 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:09 crc kubenswrapper[4733]: I1213 07:06:09.593783 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:09 crc kubenswrapper[4733]: I1213 07:06:09.593783 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:09 crc kubenswrapper[4733]: I1213 07:06:09.593929 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:09 crc kubenswrapper[4733]: E1213 07:06:09.593924 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:09 crc kubenswrapper[4733]: E1213 07:06:09.594123 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:09 crc kubenswrapper[4733]: E1213 07:06:09.594108 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:10 crc kubenswrapper[4733]: I1213 07:06:10.594531 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:10 crc kubenswrapper[4733]: E1213 07:06:10.594812 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:11 crc kubenswrapper[4733]: I1213 07:06:11.594105 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:11 crc kubenswrapper[4733]: I1213 07:06:11.594148 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:11 crc kubenswrapper[4733]: E1213 07:06:11.594304 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:11 crc kubenswrapper[4733]: E1213 07:06:11.594493 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:11 crc kubenswrapper[4733]: I1213 07:06:11.594547 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:11 crc kubenswrapper[4733]: E1213 07:06:11.594727 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:12 crc kubenswrapper[4733]: I1213 07:06:12.594521 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:12 crc kubenswrapper[4733]: E1213 07:06:12.594790 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:12 crc kubenswrapper[4733]: I1213 07:06:12.595717 4733 scope.go:117] "RemoveContainer" containerID="863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63" Dec 13 07:06:12 crc kubenswrapper[4733]: E1213 07:06:12.595943 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vsc8k_openshift-ovn-kubernetes(b6fa9235-730d-46d0-9211-563c96e073fb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" podUID="b6fa9235-730d-46d0-9211-563c96e073fb" Dec 13 07:06:13 crc kubenswrapper[4733]: I1213 07:06:13.594114 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:13 crc kubenswrapper[4733]: I1213 07:06:13.594173 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:13 crc kubenswrapper[4733]: E1213 07:06:13.594289 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:13 crc kubenswrapper[4733]: I1213 07:06:13.594318 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:13 crc kubenswrapper[4733]: E1213 07:06:13.594370 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:13 crc kubenswrapper[4733]: E1213 07:06:13.594438 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:14 crc kubenswrapper[4733]: I1213 07:06:14.594121 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:14 crc kubenswrapper[4733]: E1213 07:06:14.594339 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:15 crc kubenswrapper[4733]: I1213 07:06:15.594318 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:15 crc kubenswrapper[4733]: I1213 07:06:15.594331 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:15 crc kubenswrapper[4733]: E1213 07:06:15.595251 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:15 crc kubenswrapper[4733]: I1213 07:06:15.595271 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:15 crc kubenswrapper[4733]: E1213 07:06:15.595384 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:15 crc kubenswrapper[4733]: E1213 07:06:15.595447 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:15 crc kubenswrapper[4733]: E1213 07:06:15.635303 4733 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 13 07:06:15 crc kubenswrapper[4733]: E1213 07:06:15.672757 4733 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 13 07:06:16 crc kubenswrapper[4733]: I1213 07:06:16.331569 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h7vxp_3662e179-d702-4e13-a972-a5b6ab155e2c/kube-multus/1.log" Dec 13 07:06:16 crc kubenswrapper[4733]: I1213 07:06:16.332616 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h7vxp_3662e179-d702-4e13-a972-a5b6ab155e2c/kube-multus/0.log" Dec 13 07:06:16 crc kubenswrapper[4733]: I1213 07:06:16.332680 4733 generic.go:334] "Generic (PLEG): container finished" podID="3662e179-d702-4e13-a972-a5b6ab155e2c" containerID="ea3a2b804721d628e8069b3b202b8f2e4cde15bd3f7721e323f5009e0cae45d4" exitCode=1 Dec 13 07:06:16 crc kubenswrapper[4733]: I1213 07:06:16.332715 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h7vxp" event={"ID":"3662e179-d702-4e13-a972-a5b6ab155e2c","Type":"ContainerDied","Data":"ea3a2b804721d628e8069b3b202b8f2e4cde15bd3f7721e323f5009e0cae45d4"} Dec 13 07:06:16 crc kubenswrapper[4733]: I1213 07:06:16.332753 4733 scope.go:117] "RemoveContainer" containerID="4e87978c7cc2c87bb09063a40abd2d87615e79f499fd0e226a8eba0c8bf6a262" Dec 13 07:06:16 crc kubenswrapper[4733]: I1213 07:06:16.333191 4733 scope.go:117] "RemoveContainer" containerID="ea3a2b804721d628e8069b3b202b8f2e4cde15bd3f7721e323f5009e0cae45d4" Dec 13 07:06:16 crc kubenswrapper[4733]: E1213 07:06:16.333396 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-h7vxp_openshift-multus(3662e179-d702-4e13-a972-a5b6ab155e2c)\"" pod="openshift-multus/multus-h7vxp" podUID="3662e179-d702-4e13-a972-a5b6ab155e2c" Dec 13 07:06:16 crc kubenswrapper[4733]: I1213 07:06:16.594412 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:16 crc kubenswrapper[4733]: E1213 07:06:16.594560 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:17 crc kubenswrapper[4733]: I1213 07:06:17.337987 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h7vxp_3662e179-d702-4e13-a972-a5b6ab155e2c/kube-multus/1.log" Dec 13 07:06:17 crc kubenswrapper[4733]: I1213 07:06:17.594540 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:17 crc kubenswrapper[4733]: I1213 07:06:17.594595 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:17 crc kubenswrapper[4733]: E1213 07:06:17.594760 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:17 crc kubenswrapper[4733]: E1213 07:06:17.594909 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:17 crc kubenswrapper[4733]: I1213 07:06:17.595437 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:17 crc kubenswrapper[4733]: E1213 07:06:17.595557 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:18 crc kubenswrapper[4733]: I1213 07:06:18.594555 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:18 crc kubenswrapper[4733]: E1213 07:06:18.594680 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:19 crc kubenswrapper[4733]: I1213 07:06:19.594066 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:19 crc kubenswrapper[4733]: I1213 07:06:19.594127 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:19 crc kubenswrapper[4733]: I1213 07:06:19.594203 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:19 crc kubenswrapper[4733]: E1213 07:06:19.594199 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:19 crc kubenswrapper[4733]: E1213 07:06:19.594364 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:19 crc kubenswrapper[4733]: E1213 07:06:19.594398 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:20 crc kubenswrapper[4733]: I1213 07:06:20.594692 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:20 crc kubenswrapper[4733]: E1213 07:06:20.594839 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:20 crc kubenswrapper[4733]: E1213 07:06:20.674137 4733 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 13 07:06:21 crc kubenswrapper[4733]: I1213 07:06:21.594653 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:21 crc kubenswrapper[4733]: E1213 07:06:21.594782 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:21 crc kubenswrapper[4733]: I1213 07:06:21.594949 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:21 crc kubenswrapper[4733]: E1213 07:06:21.594990 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:21 crc kubenswrapper[4733]: I1213 07:06:21.595079 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:21 crc kubenswrapper[4733]: E1213 07:06:21.595121 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:22 crc kubenswrapper[4733]: I1213 07:06:22.593993 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:22 crc kubenswrapper[4733]: E1213 07:06:22.594130 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:23 crc kubenswrapper[4733]: I1213 07:06:23.594462 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:23 crc kubenswrapper[4733]: I1213 07:06:23.594507 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:23 crc kubenswrapper[4733]: I1213 07:06:23.594462 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:23 crc kubenswrapper[4733]: E1213 07:06:23.594661 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:23 crc kubenswrapper[4733]: E1213 07:06:23.594794 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:23 crc kubenswrapper[4733]: E1213 07:06:23.594875 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:24 crc kubenswrapper[4733]: I1213 07:06:24.594047 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:24 crc kubenswrapper[4733]: E1213 07:06:24.594201 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:25 crc kubenswrapper[4733]: I1213 07:06:25.594359 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:25 crc kubenswrapper[4733]: I1213 07:06:25.594399 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:25 crc kubenswrapper[4733]: I1213 07:06:25.594365 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:25 crc kubenswrapper[4733]: E1213 07:06:25.595339 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:25 crc kubenswrapper[4733]: E1213 07:06:25.595429 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:25 crc kubenswrapper[4733]: E1213 07:06:25.595539 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:25 crc kubenswrapper[4733]: I1213 07:06:25.596255 4733 scope.go:117] "RemoveContainer" containerID="863075cf095552dafa43c51ddc81e59501153452104ce81f7c8ab745b9d26a63" Dec 13 07:06:25 crc kubenswrapper[4733]: E1213 07:06:25.675375 4733 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 13 07:06:26 crc kubenswrapper[4733]: I1213 07:06:26.594458 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:26 crc kubenswrapper[4733]: E1213 07:06:26.595049 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:26 crc kubenswrapper[4733]: I1213 07:06:26.595616 4733 scope.go:117] "RemoveContainer" containerID="ea3a2b804721d628e8069b3b202b8f2e4cde15bd3f7721e323f5009e0cae45d4" Dec 13 07:06:27 crc kubenswrapper[4733]: I1213 07:06:27.594170 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:27 crc kubenswrapper[4733]: I1213 07:06:27.594299 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:27 crc kubenswrapper[4733]: I1213 07:06:27.594306 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:27 crc kubenswrapper[4733]: E1213 07:06:27.594483 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:27 crc kubenswrapper[4733]: E1213 07:06:27.594663 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:27 crc kubenswrapper[4733]: E1213 07:06:27.594797 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:28 crc kubenswrapper[4733]: I1213 07:06:28.376361 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vsc8k_b6fa9235-730d-46d0-9211-563c96e073fb/ovnkube-controller/3.log" Dec 13 07:06:28 crc kubenswrapper[4733]: I1213 07:06:28.379772 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" event={"ID":"b6fa9235-730d-46d0-9211-563c96e073fb","Type":"ContainerStarted","Data":"62ad2d75cf419afae9fd136679a3cc2e227059e74bbccb6b0f03ce522ad71caa"} Dec 13 07:06:28 crc kubenswrapper[4733]: I1213 07:06:28.594239 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:28 crc kubenswrapper[4733]: E1213 07:06:28.594406 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:29 crc kubenswrapper[4733]: I1213 07:06:29.384819 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-h7vxp_3662e179-d702-4e13-a972-a5b6ab155e2c/kube-multus/1.log" Dec 13 07:06:29 crc kubenswrapper[4733]: I1213 07:06:29.384890 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-h7vxp" event={"ID":"3662e179-d702-4e13-a972-a5b6ab155e2c","Type":"ContainerStarted","Data":"0e1e52aa9d7ccd4f2f3eb3c8f137cf5e43b021fd647fafcad9f4d6f40ea1260c"} Dec 13 07:06:29 crc kubenswrapper[4733]: I1213 07:06:29.385245 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:06:29 crc kubenswrapper[4733]: I1213 07:06:29.408534 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" podStartSLOduration=109.408515694 podStartE2EDuration="1m49.408515694s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:29.408433391 +0000 UTC m=+134.019663904" watchObservedRunningTime="2025-12-13 07:06:29.408515694 +0000 UTC m=+134.019746177" Dec 13 07:06:29 crc kubenswrapper[4733]: I1213 07:06:29.488644 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-r5dfg"] Dec 13 07:06:29 crc kubenswrapper[4733]: I1213 07:06:29.488758 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:29 crc kubenswrapper[4733]: E1213 07:06:29.488860 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:29 crc kubenswrapper[4733]: I1213 07:06:29.594593 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:29 crc kubenswrapper[4733]: I1213 07:06:29.594671 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:29 crc kubenswrapper[4733]: E1213 07:06:29.594712 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:29 crc kubenswrapper[4733]: E1213 07:06:29.594846 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:29 crc kubenswrapper[4733]: I1213 07:06:29.594857 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:29 crc kubenswrapper[4733]: E1213 07:06:29.594969 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:30 crc kubenswrapper[4733]: I1213 07:06:30.594625 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:30 crc kubenswrapper[4733]: E1213 07:06:30.594764 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:30 crc kubenswrapper[4733]: E1213 07:06:30.676397 4733 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 13 07:06:31 crc kubenswrapper[4733]: I1213 07:06:31.594162 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:31 crc kubenswrapper[4733]: I1213 07:06:31.594162 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:31 crc kubenswrapper[4733]: E1213 07:06:31.594337 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:31 crc kubenswrapper[4733]: I1213 07:06:31.594475 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:31 crc kubenswrapper[4733]: E1213 07:06:31.594559 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:31 crc kubenswrapper[4733]: E1213 07:06:31.594617 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:32 crc kubenswrapper[4733]: I1213 07:06:32.594309 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:32 crc kubenswrapper[4733]: E1213 07:06:32.594510 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:33 crc kubenswrapper[4733]: I1213 07:06:33.594157 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:33 crc kubenswrapper[4733]: E1213 07:06:33.594662 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:33 crc kubenswrapper[4733]: I1213 07:06:33.594326 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:33 crc kubenswrapper[4733]: E1213 07:06:33.594739 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:33 crc kubenswrapper[4733]: I1213 07:06:33.594159 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:33 crc kubenswrapper[4733]: E1213 07:06:33.594796 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:34 crc kubenswrapper[4733]: I1213 07:06:34.594133 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:34 crc kubenswrapper[4733]: E1213 07:06:34.594289 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r5dfg" podUID="868ca71b-6aae-4054-8b03-cf7c2b3499cd" Dec 13 07:06:35 crc kubenswrapper[4733]: I1213 07:06:35.593929 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:35 crc kubenswrapper[4733]: I1213 07:06:35.593936 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:35 crc kubenswrapper[4733]: E1213 07:06:35.595275 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 13 07:06:35 crc kubenswrapper[4733]: I1213 07:06:35.595289 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:35 crc kubenswrapper[4733]: E1213 07:06:35.595368 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 13 07:06:35 crc kubenswrapper[4733]: E1213 07:06:35.595458 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.594119 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.596938 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.597084 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.655987 4733 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.695340 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lbkq2"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.695764 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-z4czb"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.696066 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.696425 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.698552 4733 reflector.go:561] object-"openshift-machine-api"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.698694 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.698808 4733 reflector.go:561] object-"openshift-console"/"console-dockercfg-f62pw": failed to list *v1.Secret: secrets "console-dockercfg-f62pw" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.698882 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"console-dockercfg-f62pw\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"console-dockercfg-f62pw\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.698992 4733 reflector.go:561] object-"openshift-console"/"console-serving-cert": failed to list *v1.Secret: secrets "console-serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.699059 4733 reflector.go:158] "Unhandled Error" 
err="object-\"openshift-console\"/\"console-serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"console-serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.700495 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.701019 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-r59jx"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.701311 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sxsq6"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.701657 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.702094 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.702449 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-r59jx" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.703971 4733 reflector.go:561] object-"openshift-console"/"console-config": failed to list *v1.ConfigMap: configmaps "console-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.704091 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"console-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"console-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.704223 4733 reflector.go:561] object-"openshift-console"/"console-oauth-config": failed to list *v1.Secret: secrets "console-oauth-config" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.704303 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd"] Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.704436 4733 reflector.go:561] object-"openshift-console"/"trusted-ca-bundle": failed to list *v1.ConfigMap: configmaps "trusted-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.704511 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"trusted-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"trusted-ca-bundle\" is forbidden: 
User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.704645 4733 reflector.go:561] object-"openshift-console"/"service-ca": failed to list *v1.ConfigMap: configmaps "service-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.704724 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.704786 4733 reflector.go:561] object-"openshift-console"/"oauth-serving-cert": failed to list *v1.ConfigMap: configmaps "oauth-serving-cert" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.704863 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"oauth-serving-cert\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"oauth-serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.704667 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7"] Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.704302 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"console-oauth-config\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"console-oauth-config\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.705390 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kzjdj"] Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.705468 4733 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-tls": failed to list *v1.Secret: secrets "machine-api-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.705511 4733 reflector.go:561] object-"openshift-console-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.705529 4733 reflector.go:561] object-"openshift-console-operator"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace 
"openshift-console-operator": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.705550 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.705524 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.705565 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.705597 4733 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-images": failed to list *v1.ConfigMap: configmaps "machine-api-operator-images" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.705610 4733 reflector.go:561] object-"openshift-console"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.704727 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"service-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"service-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.705641 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.705612 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-images\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"machine-api-operator-images\" is 
forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.705661 4733 reflector.go:561] object-"openshift-machine-api"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.705679 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.705703 4733 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7": failed to list *v1.Secret: secrets "machine-api-operator-dockercfg-mfbb7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.705724 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-dockercfg-mfbb7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-dockercfg-mfbb7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.705776 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.705914 4733 reflector.go:561] object-"openshift-machine-api"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.705944 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.706009 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.708581 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.708960 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.709279 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.709333 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.709391 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.709929 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.712683 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.713310 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.714413 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-jsklp"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.714812 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.715278 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jwh8g"] Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.715476 4733 reflector.go:561] object-"openshift-console"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.715514 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.715550 4733 reflector.go:561] object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr": failed to list *v1.Secret: secrets "console-operator-dockercfg-4xjcr" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.715561 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"console-operator-dockercfg-4xjcr\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"console-operator-dockercfg-4xjcr\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.715687 4733 reflector.go:561] object-"openshift-console-operator"/"trusted-ca": failed to list *v1.ConfigMap: configmaps "trusted-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.715714 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"trusted-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"trusted-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.715743 4733 reflector.go:561] object-"openshift-console-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.715752 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps 
\"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.715801 4733 reflector.go:561] object-"openshift-console-operator"/"console-operator-config": failed to list *v1.ConfigMap: configmaps "console-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.715815 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"console-operator-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"console-operator-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.716123 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.716127 4733 reflector.go:561] object-"openshift-console"/"default-dockercfg-chnjx": failed to list *v1.Secret: secrets "default-dockercfg-chnjx" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.716590 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"default-dockercfg-chnjx\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-chnjx\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.716848 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xqvh8"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.717385 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.743487 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-278t5"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.750816 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.751397 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-84p49"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.752016 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-84p49" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.752414 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.752592 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.753203 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.753417 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.753204 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.753644 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.754314 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.754427 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.754573 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.754812 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.755562 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vf8wt"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.756247 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.756671 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.757237 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.758265 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.758916 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.759263 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.759612 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.759722 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.759890 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.759934 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.759991 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.762912 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.761181 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-2qw2l"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.762947 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.763007 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.763013 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.763080 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.763114 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.763175 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.763218 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.766434 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.766515 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.766555 4733 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.766641 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.766811 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.766947 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.767062 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.767175 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.767375 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.767528 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.767616 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.767734 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.767800 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.768461 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.768821 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.769020 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.769055 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.769147 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.769482 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.774505 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.774745 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.782313 4733 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 13 07:06:36 crc kubenswrapper[4733]: W1213 07:06:36.782661 4733 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config": failed to list *v1.ConfigMap: configmaps "openshift-apiserver-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Dec 13 07:06:36 crc kubenswrapper[4733]: E1213 07:06:36.782765 4733 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-apiserver-operator-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.782896 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.783050 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.783230 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.783386 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.783535 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.784309 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.784457 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.784656 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.784787 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.784908 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.785043 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.785244 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.785397 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.785525 
4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.785649 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.786028 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.786231 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.786408 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.786424 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.786598 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.786556 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.786848 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.786919 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.787071 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.786860 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.787896 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.789158 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.789893 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.816631 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcbs2\" (UniqueName: \"kubernetes.io/projected/351d859b-8f7e-4adc-9a55-10615cd158fc-kube-api-access-wcbs2\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.816792 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-config\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.816870 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d8abbbbe-5215-4443-8824-dec6f2a1c89e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-z69hg\" (UID: \"d8abbbbe-5215-4443-8824-dec6f2a1c89e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817029 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/351d859b-8f7e-4adc-9a55-10615cd158fc-config\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817205 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-tmpfs\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817370 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a41e126a-492d-4340-bb48-772b1364fd25-signing-cabundle\") pod \"service-ca-9c57cc56f-84p49\" (UID: \"a41e126a-492d-4340-bb48-772b1364fd25\") " pod="openshift-service-ca/service-ca-9c57cc56f-84p49" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817498 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-config\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817520 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b51be9d-879f-441e-bb5e-baae8f6c8378-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817572 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d8abbbbe-5215-4443-8824-dec6f2a1c89e-proxy-tls\") pod \"machine-config-controller-84d6567774-z69hg\" (UID: \"d8abbbbe-5215-4443-8824-dec6f2a1c89e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817626 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-node-pullsecrets\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc 
kubenswrapper[4733]: I1213 07:06:36.817675 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-serving-cert\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817733 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-client-ca\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817826 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv66t\" (UniqueName: \"kubernetes.io/projected/a41e126a-492d-4340-bb48-772b1364fd25-kube-api-access-gv66t\") pod \"service-ca-9c57cc56f-84p49\" (UID: \"a41e126a-492d-4340-bb48-772b1364fd25\") " pod="openshift-service-ca/service-ca-9c57cc56f-84p49" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817848 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-image-import-ca\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817870 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1412595e-0064-4407-89a2-51b8e97f49d7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-n66g9\" (UID: \"1412595e-0064-4407-89a2-51b8e97f49d7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817966 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2nzd\" (UniqueName: \"kubernetes.io/projected/3b51be9d-879f-441e-bb5e-baae8f6c8378-kube-api-access-m2nzd\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.817986 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw77h\" (UniqueName: \"kubernetes.io/projected/114a8849-0be4-4fde-ada0-9d8ebf304ede-kube-api-access-hw77h\") pod \"dns-operator-744455d44c-vf8wt\" (UID: \"114a8849-0be4-4fde-ada0-9d8ebf304ede\") " pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.818039 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmqgv\" (UniqueName: \"kubernetes.io/projected/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-kube-api-access-pmqgv\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.818131 4733 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1412595e-0064-4407-89a2-51b8e97f49d7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-n66g9\" (UID: \"1412595e-0064-4407-89a2-51b8e97f49d7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.818155 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs7nq\" (UniqueName: \"kubernetes.io/projected/1412595e-0064-4407-89a2-51b8e97f49d7-kube-api-access-hs7nq\") pod \"openshift-apiserver-operator-796bbdcf4f-n66g9\" (UID: \"1412595e-0064-4407-89a2-51b8e97f49d7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.822914 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.824028 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.826579 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f4rl\" (UniqueName: \"kubernetes.io/projected/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-kube-api-access-8f4rl\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.826761 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/114a8849-0be4-4fde-ada0-9d8ebf304ede-metrics-tls\") pod \"dns-operator-744455d44c-vf8wt\" (UID: \"114a8849-0be4-4fde-ada0-9d8ebf304ede\") " pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.827926 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.830365 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.841922 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-audit-dir\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842017 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc 
kubenswrapper[4733]: I1213 07:06:36.842049 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b51be9d-879f-441e-bb5e-baae8f6c8378-serving-cert\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842076 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb74x\" (UniqueName: \"kubernetes.io/projected/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-kube-api-access-xb74x\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842104 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-apiservice-cert\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842149 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b51be9d-879f-441e-bb5e-baae8f6c8378-config\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842171 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5a860e32-a83a-403e-aed7-a3e703a0b0cb-audit-policies\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842195 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a41e126a-492d-4340-bb48-772b1364fd25-signing-key\") pod \"service-ca-9c57cc56f-84p49\" (UID: \"a41e126a-492d-4340-bb48-772b1364fd25\") " pod="openshift-service-ca/service-ca-9c57cc56f-84p49" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842252 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/351d859b-8f7e-4adc-9a55-10615cd158fc-machine-approver-tls\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842275 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znmg5\" (UniqueName: \"kubernetes.io/projected/d8abbbbe-5215-4443-8824-dec6f2a1c89e-kube-api-access-znmg5\") pod \"machine-config-controller-84d6567774-z69hg\" (UID: \"d8abbbbe-5215-4443-8824-dec6f2a1c89e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 
07:06:36.842312 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-serving-cert\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842342 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-audit\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842364 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b51be9d-879f-441e-bb5e-baae8f6c8378-service-ca-bundle\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842380 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/351d859b-8f7e-4adc-9a55-10615cd158fc-auth-proxy-config\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842404 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-encryption-config\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842420 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-etcd-client\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842435 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-etcd-serving-ca\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.842449 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-webhook-cert\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.844650 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 13 
07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.844894 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-cbq4r"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.845202 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.846556 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.846946 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.847691 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.848111 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.848362 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.849978 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.853589 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.854043 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.858453 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.858728 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.860810 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.872240 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.872570 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.873067 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.873090 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.873321 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.873591 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.873681 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.873790 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.873818 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.878772 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.879631 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.883305 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-975kj"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.884151 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.884905 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.885601 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.887646 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.888249 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.888379 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.892350 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.892860 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.899082 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5dwn"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.899767 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.899805 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.900442 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.904402 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-nlspd"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.904574 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.905071 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lbkq2"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.905107 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.905188 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.910450 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-r59jx"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.921161 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.923405 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.923588 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.929459 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.931260 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.938347 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sxsq6"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.939513 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kzjdj"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.943725 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5a860e32-a83a-403e-aed7-a3e703a0b0cb-etcd-client\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.943766 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.943881 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-serving-cert\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.943901 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-jsklp"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.944866 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlbjk\" (UniqueName: \"kubernetes.io/projected/02e53589-ca3a-41cc-be79-94b5453bf411-kube-api-access-nlbjk\") pod \"downloads-7954f5f757-r59jx\" (UID: \"02e53589-ca3a-41cc-be79-94b5453bf411\") " pod="openshift-console/downloads-7954f5f757-r59jx" Dec 13 07:06:36 crc 
kubenswrapper[4733]: I1213 07:06:36.944953 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/351d859b-8f7e-4adc-9a55-10615cd158fc-auth-proxy-config\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.946425 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.946533 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/351d859b-8f7e-4adc-9a55-10615cd158fc-auth-proxy-config\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.946572 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-84p49"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.946643 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-audit\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.946776 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b51be9d-879f-441e-bb5e-baae8f6c8378-service-ca-bundle\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.952983 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-audit\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.953733 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b51be9d-879f-441e-bb5e-baae8f6c8378-service-ca-bundle\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.946801 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-etcd-client\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958506 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-encryption-config\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " 
pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958554 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5a860e32-a83a-403e-aed7-a3e703a0b0cb-encryption-config\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958562 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958598 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958633 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958669 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-etcd-serving-ca\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958700 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-webhook-cert\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958732 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcbs2\" (UniqueName: \"kubernetes.io/projected/351d859b-8f7e-4adc-9a55-10615cd158fc-kube-api-access-wcbs2\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958770 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958809 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-service-ca-bundle\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958837 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/351d859b-8f7e-4adc-9a55-10615cd158fc-config\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958871 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-config\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958905 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d8abbbbe-5215-4443-8824-dec6f2a1c89e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-z69hg\" (UID: \"d8abbbbe-5215-4443-8824-dec6f2a1c89e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958938 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a860e32-a83a-403e-aed7-a3e703a0b0cb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958969 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a41e126a-492d-4340-bb48-772b1364fd25-signing-cabundle\") pod \"service-ca-9c57cc56f-84p49\" (UID: \"a41e126a-492d-4340-bb48-772b1364fd25\") " pod="openshift-service-ca/service-ca-9c57cc56f-84p49" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.958997 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-tmpfs\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959026 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b51be9d-879f-441e-bb5e-baae8f6c8378-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959057 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2l48\" (UniqueName: \"kubernetes.io/projected/875adea7-a638-4064-a466-2ab4e37771e3-kube-api-access-x2l48\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959091 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-config\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959122 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d8abbbbe-5215-4443-8824-dec6f2a1c89e-proxy-tls\") pod \"machine-config-controller-84d6567774-z69hg\" (UID: \"d8abbbbe-5215-4443-8824-dec6f2a1c89e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959161 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-node-pullsecrets\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959196 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-default-certificate\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959283 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-serving-cert\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959307 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-client-ca\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959337 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv66t\" (UniqueName: \"kubernetes.io/projected/a41e126a-492d-4340-bb48-772b1364fd25-kube-api-access-gv66t\") pod \"service-ca-9c57cc56f-84p49\" (UID: \"a41e126a-492d-4340-bb48-772b1364fd25\") " pod="openshift-service-ca/service-ca-9c57cc56f-84p49" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959383 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-image-import-ca\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959408 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1412595e-0064-4407-89a2-51b8e97f49d7-serving-cert\") 
pod \"openshift-apiserver-operator-796bbdcf4f-n66g9\" (UID: \"1412595e-0064-4407-89a2-51b8e97f49d7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959438 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5a860e32-a83a-403e-aed7-a3e703a0b0cb-audit-dir\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959466 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89jht\" (UniqueName: \"kubernetes.io/projected/5a860e32-a83a-403e-aed7-a3e703a0b0cb-kube-api-access-89jht\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959498 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2nzd\" (UniqueName: \"kubernetes.io/projected/3b51be9d-879f-441e-bb5e-baae8f6c8378-kube-api-access-m2nzd\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959524 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw77h\" (UniqueName: \"kubernetes.io/projected/114a8849-0be4-4fde-ada0-9d8ebf304ede-kube-api-access-hw77h\") pod \"dns-operator-744455d44c-vf8wt\" (UID: \"114a8849-0be4-4fde-ada0-9d8ebf304ede\") " pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959553 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmqgv\" (UniqueName: \"kubernetes.io/projected/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-kube-api-access-pmqgv\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959582 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1412595e-0064-4407-89a2-51b8e97f49d7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-n66g9\" (UID: \"1412595e-0064-4407-89a2-51b8e97f49d7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959610 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs7nq\" (UniqueName: \"kubernetes.io/projected/1412595e-0064-4407-89a2-51b8e97f49d7-kube-api-access-hs7nq\") pod \"openshift-apiserver-operator-796bbdcf4f-n66g9\" (UID: \"1412595e-0064-4407-89a2-51b8e97f49d7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959642 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-stats-auth\") pod \"router-default-5444994796-2qw2l\" (UID: 
\"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959667 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjb26\" (UniqueName: \"kubernetes.io/projected/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-kube-api-access-xjb26\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959711 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959742 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959773 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f4rl\" (UniqueName: \"kubernetes.io/projected/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-kube-api-access-8f4rl\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959798 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/114a8849-0be4-4fde-ada0-9d8ebf304ede-metrics-tls\") pod \"dns-operator-744455d44c-vf8wt\" (UID: \"114a8849-0be4-4fde-ada0-9d8ebf304ede\") " pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959823 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-audit-policies\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959845 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959870 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959900 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959925 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-audit-dir\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.959960 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960032 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960072 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b51be9d-879f-441e-bb5e-baae8f6c8378-serving-cert\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960118 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb74x\" (UniqueName: \"kubernetes.io/projected/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-kube-api-access-xb74x\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960149 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/875adea7-a638-4064-a466-2ab4e37771e3-audit-dir\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960176 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-apiservice-cert\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960194 4733 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960244 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960294 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5a860e32-a83a-403e-aed7-a3e703a0b0cb-audit-policies\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960323 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a860e32-a83a-403e-aed7-a3e703a0b0cb-serving-cert\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960346 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b51be9d-879f-441e-bb5e-baae8f6c8378-config\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960379 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a41e126a-492d-4340-bb48-772b1364fd25-signing-key\") pod \"service-ca-9c57cc56f-84p49\" (UID: \"a41e126a-492d-4340-bb48-772b1364fd25\") " pod="openshift-service-ca/service-ca-9c57cc56f-84p49" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960432 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5a860e32-a83a-403e-aed7-a3e703a0b0cb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960460 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-metrics-certs\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960492 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/351d859b-8f7e-4adc-9a55-10615cd158fc-machine-approver-tls\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960527 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znmg5\" (UniqueName: \"kubernetes.io/projected/d8abbbbe-5215-4443-8824-dec6f2a1c89e-kube-api-access-znmg5\") pod \"machine-config-controller-84d6567774-z69hg\" (UID: \"d8abbbbe-5215-4443-8824-dec6f2a1c89e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.960501 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-serving-cert\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.961557 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/351d859b-8f7e-4adc-9a55-10615cd158fc-config\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.962923 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.963065 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-config\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.963315 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-encryption-config\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.965744 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d8abbbbe-5215-4443-8824-dec6f2a1c89e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-z69hg\" (UID: \"d8abbbbe-5215-4443-8824-dec6f2a1c89e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.965991 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-node-pullsecrets\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.966333 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-config\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.966346 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b51be9d-879f-441e-bb5e-baae8f6c8378-config\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.966374 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a41e126a-492d-4340-bb48-772b1364fd25-signing-cabundle\") pod \"service-ca-9c57cc56f-84p49\" (UID: \"a41e126a-492d-4340-bb48-772b1364fd25\") " pod="openshift-service-ca/service-ca-9c57cc56f-84p49" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.966574 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5a860e32-a83a-403e-aed7-a3e703a0b0cb-audit-policies\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.966631 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1412595e-0064-4407-89a2-51b8e97f49d7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-n66g9\" (UID: \"1412595e-0064-4407-89a2-51b8e97f49d7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.966652 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.966784 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-tmpfs\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.966821 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-etcd-serving-ca\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.966898 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-audit-dir\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.967283 4733 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.967275 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b51be9d-879f-441e-bb5e-baae8f6c8378-serving-cert\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.968687 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-client-ca\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.968977 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-image-import-ca\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.969024 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3b51be9d-879f-441e-bb5e-baae8f6c8378-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.969544 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d8abbbbe-5215-4443-8824-dec6f2a1c89e-proxy-tls\") pod \"machine-config-controller-84d6567774-z69hg\" (UID: \"d8abbbbe-5215-4443-8824-dec6f2a1c89e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.969575 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-etcd-client\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.969691 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/351d859b-8f7e-4adc-9a55-10615cd158fc-machine-approver-tls\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.969732 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-975kj"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.969929 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc 
kubenswrapper[4733]: I1213 07:06:36.970141 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a41e126a-492d-4340-bb48-772b1364fd25-signing-key\") pod \"service-ca-9c57cc56f-84p49\" (UID: \"a41e126a-492d-4340-bb48-772b1364fd25\") " pod="openshift-service-ca/service-ca-9c57cc56f-84p49" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.970813 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-serving-cert\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.973237 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.975364 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-278t5"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.977975 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vf8wt"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.978282 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.979783 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-9p8cj"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.980756 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9p8cj" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.983421 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.984969 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jwh8g"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.985655 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-g9699"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.987626 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/114a8849-0be4-4fde-ada0-9d8ebf304ede-metrics-tls\") pod \"dns-operator-744455d44c-vf8wt\" (UID: \"114a8849-0be4-4fde-ada0-9d8ebf304ede\") " pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.987699 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.988782 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-cbq4r"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.990029 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.992643 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.992969 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.994511 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.996251 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xqvh8"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.997705 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7"] Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.998101 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 13 07:06:36 crc kubenswrapper[4733]: I1213 07:06:36.999183 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-z4czb"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.000513 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-l9qj7"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.001125 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-l9qj7" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.002355 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.003604 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.005113 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5dwn"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.006616 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.008288 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.009501 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-g9699"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.010737 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.012339 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9p8cj"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.013219 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-nlspd"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.014294 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.015338 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-z8h96"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.016841 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8sw6n"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.017652 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.019555 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.019817 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-z8h96"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.020073 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.020621 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8sw6n"] Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.039803 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.059097 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061075 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061121 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061153 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-audit-policies\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061179 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061202 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061265 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/875adea7-a638-4064-a466-2ab4e37771e3-audit-dir\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061298 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-session\") pod 
\"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061324 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061347 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a860e32-a83a-403e-aed7-a3e703a0b0cb-serving-cert\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061374 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5a860e32-a83a-403e-aed7-a3e703a0b0cb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061396 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-metrics-certs\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061446 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5a860e32-a83a-403e-aed7-a3e703a0b0cb-etcd-client\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061467 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061495 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlbjk\" (UniqueName: \"kubernetes.io/projected/02e53589-ca3a-41cc-be79-94b5453bf411-kube-api-access-nlbjk\") pod \"downloads-7954f5f757-r59jx\" (UID: \"02e53589-ca3a-41cc-be79-94b5453bf411\") " pod="openshift-console/downloads-7954f5f757-r59jx" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061530 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5a860e32-a83a-403e-aed7-a3e703a0b0cb-encryption-config\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061563 4733 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061586 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061617 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061639 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a860e32-a83a-403e-aed7-a3e703a0b0cb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061666 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-service-ca-bundle\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061690 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2l48\" (UniqueName: \"kubernetes.io/projected/875adea7-a638-4064-a466-2ab4e37771e3-kube-api-access-x2l48\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061719 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-default-certificate\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061781 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5a860e32-a83a-403e-aed7-a3e703a0b0cb-audit-dir\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061804 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89jht\" (UniqueName: 
\"kubernetes.io/projected/5a860e32-a83a-403e-aed7-a3e703a0b0cb-kube-api-access-89jht\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061851 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-stats-auth\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061887 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjb26\" (UniqueName: \"kubernetes.io/projected/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-kube-api-access-xjb26\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.061922 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.062042 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5a860e32-a83a-403e-aed7-a3e703a0b0cb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.062167 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.062672 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.062733 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5a860e32-a83a-403e-aed7-a3e703a0b0cb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.062747 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5a860e32-a83a-403e-aed7-a3e703a0b0cb-audit-dir\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 
crc kubenswrapper[4733]: I1213 07:06:37.062861 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/875adea7-a638-4064-a466-2ab4e37771e3-audit-dir\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.063400 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.063499 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-audit-policies\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.064616 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.065051 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.065792 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5a860e32-a83a-403e-aed7-a3e703a0b0cb-encryption-config\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.066070 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5a860e32-a83a-403e-aed7-a3e703a0b0cb-serving-cert\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.066424 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.066875 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5a860e32-a83a-403e-aed7-a3e703a0b0cb-etcd-client\") pod 
\"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.067016 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.067494 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.068060 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.068449 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.075883 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.079155 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.087188 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-webhook-cert\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.087335 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-apiservice-cert\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.098711 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 13 07:06:37 
crc kubenswrapper[4733]: I1213 07:06:37.118765 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.138636 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.159113 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.178285 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.199121 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.219068 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.238286 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.258996 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.278661 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.298989 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.306199 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-metrics-certs\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.318553 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.326844 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-stats-auth\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.338325 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.343951 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-service-ca-bundle\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:37 
crc kubenswrapper[4733]: I1213 07:06:37.358995 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.365337 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-default-certificate\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.378614 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.398485 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.458260 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.478089 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.498768 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.517584 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.538494 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.558488 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.578562 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.594433 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.594675 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.594856 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.599404 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.618642 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.639472 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.659282 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.679185 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.698992 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.719864 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.738428 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.758675 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.778749 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.800451 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.819722 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.839457 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.858684 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.877199 4733 request.go:700] Waited for 1.015901327s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-api/secrets?fieldSelector=metadata.name%3Dcontrol-plane-machine-set-operator-dockercfg-k9rxt&limit=500&resourceVersion=0 Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.879405 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.899320 4733 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.920188 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.939670 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 13 07:06:37 crc kubenswrapper[4733]: E1213 07:06:37.965349 4733 configmap.go:193] Couldn't get configMap openshift-apiserver-operator/openshift-apiserver-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 13 07:06:37 crc kubenswrapper[4733]: E1213 07:06:37.965477 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1412595e-0064-4407-89a2-51b8e97f49d7-config podName:1412595e-0064-4407-89a2-51b8e97f49d7 nodeName:}" failed. No retries permitted until 2025-12-13 07:06:38.465444491 +0000 UTC m=+143.076674984 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/1412595e-0064-4407-89a2-51b8e97f49d7-config") pod "openshift-apiserver-operator-796bbdcf4f-n66g9" (UID: "1412595e-0064-4407-89a2-51b8e97f49d7") : failed to sync configmap cache: timed out waiting for the condition Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.970948 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 13 07:06:37 crc kubenswrapper[4733]: I1213 07:06:37.979732 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.000006 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.020518 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.038840 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.059314 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.079375 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.098236 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.118451 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.139148 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.159047 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 07:06:38 crc 
kubenswrapper[4733]: I1213 07:06:38.185932 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.199116 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.219424 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.239261 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.259520 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.278075 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.298376 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.318865 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.339121 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.358870 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.378268 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.398880 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.418579 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.438866 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.475774 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw77h\" (UniqueName: \"kubernetes.io/projected/114a8849-0be4-4fde-ada0-9d8ebf304ede-kube-api-access-hw77h\") pod \"dns-operator-744455d44c-vf8wt\" (UID: \"114a8849-0be4-4fde-ada0-9d8ebf304ede\") " pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.481260 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1412595e-0064-4407-89a2-51b8e97f49d7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-n66g9\" (UID: \"1412595e-0064-4407-89a2-51b8e97f49d7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 
13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.498016 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcbs2\" (UniqueName: \"kubernetes.io/projected/351d859b-8f7e-4adc-9a55-10615cd158fc-kube-api-access-wcbs2\") pod \"machine-approver-56656f9798-jlp6n\" (UID: \"351d859b-8f7e-4adc-9a55-10615cd158fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.518614 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmqgv\" (UniqueName: \"kubernetes.io/projected/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-kube-api-access-pmqgv\") pod \"controller-manager-879f6c89f-jwh8g\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.535058 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f4rl\" (UniqueName: \"kubernetes.io/projected/0cb4ca5e-83af-4dd2-aecf-5759e90f9b77-kube-api-access-8f4rl\") pod \"apiserver-76f77b778f-kzjdj\" (UID: \"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77\") " pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.537816 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.554431 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb74x\" (UniqueName: \"kubernetes.io/projected/d5eb015c-f55a-4ae3-9c01-63c1f5f167b8-kube-api-access-xb74x\") pod \"packageserver-d55dfcdfc-4949m\" (UID: \"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.572964 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znmg5\" (UniqueName: \"kubernetes.io/projected/d8abbbbe-5215-4443-8824-dec6f2a1c89e-kube-api-access-znmg5\") pod \"machine-config-controller-84d6567774-z69hg\" (UID: \"d8abbbbe-5215-4443-8824-dec6f2a1c89e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.593877 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2nzd\" (UniqueName: \"kubernetes.io/projected/3b51be9d-879f-441e-bb5e-baae8f6c8378-kube-api-access-m2nzd\") pod \"authentication-operator-69f744f599-jsklp\" (UID: \"3b51be9d-879f-441e-bb5e-baae8f6c8378\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.611157 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gv66t\" (UniqueName: \"kubernetes.io/projected/a41e126a-492d-4340-bb48-772b1364fd25-kube-api-access-gv66t\") pod \"service-ca-9c57cc56f-84p49\" (UID: \"a41e126a-492d-4340-bb48-772b1364fd25\") " pod="openshift-service-ca/service-ca-9c57cc56f-84p49" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.634691 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs7nq\" (UniqueName: \"kubernetes.io/projected/1412595e-0064-4407-89a2-51b8e97f49d7-kube-api-access-hs7nq\") pod \"openshift-apiserver-operator-796bbdcf4f-n66g9\" (UID: \"1412595e-0064-4407-89a2-51b8e97f49d7\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.640139 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.658841 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.681398 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.688780 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.698615 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.711235 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.719165 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 13 07:06:38 crc kubenswrapper[4733]: W1213 07:06:38.729606 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod351d859b_8f7e_4adc_9a55_10615cd158fc.slice/crio-8d50ca0377181546534ec048c33b8da7b6f995bdb5f7d9b91ab4b5ad1af4e3ee WatchSource:0}: Error finding container 8d50ca0377181546534ec048c33b8da7b6f995bdb5f7d9b91ab4b5ad1af4e3ee: Status 404 returned error can't find the container with id 8d50ca0377181546534ec048c33b8da7b6f995bdb5f7d9b91ab4b5ad1af4e3ee Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.734717 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vf8wt"] Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.738310 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.758812 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.760489 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.782033 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.799427 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.799755 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.817644 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-84p49" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.818994 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.831529 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.838839 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.848353 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.860934 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.861046 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kzjdj"] Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.878644 4733 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.898439 4733 request.go:700] Waited for 1.877500081s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/hostpath-provisioner/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.900722 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.919355 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 13 07:06:38 crc kubenswrapper[4733]: I1213 07:06:38.977484 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2l48\" (UniqueName: \"kubernetes.io/projected/875adea7-a638-4064-a466-2ab4e37771e3-kube-api-access-x2l48\") pod \"oauth-openshift-558db77b4-xqvh8\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.012967 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89jht\" (UniqueName: \"kubernetes.io/projected/5a860e32-a83a-403e-aed7-a3e703a0b0cb-kube-api-access-89jht\") pod \"apiserver-7bbb656c7d-4vqnd\" (UID: \"5a860e32-a83a-403e-aed7-a3e703a0b0cb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.015852 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjb26\" (UniqueName: \"kubernetes.io/projected/bbe545cc-6362-4ade-b7aa-65e5fa4892e2-kube-api-access-xjb26\") pod \"router-default-5444994796-2qw2l\" (UID: \"bbe545cc-6362-4ade-b7aa-65e5fa4892e2\") " pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.058668 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.074428 
4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-jsklp"] Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.079825 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 13 07:06:39 crc kubenswrapper[4733]: W1213 07:06:39.085263 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b51be9d_879f_441e_bb5e_baae8f6c8378.slice/crio-4c21523a392bd82b4d6221976df0689e17365a90ec7c2563b86cb1eff1d7d07c WatchSource:0}: Error finding container 4c21523a392bd82b4d6221976df0689e17365a90ec7c2563b86cb1eff1d7d07c: Status 404 returned error can't find the container with id 4c21523a392bd82b4d6221976df0689e17365a90ec7c2563b86cb1eff1d7d07c Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.088742 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: \"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.088772 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2k2k\" (UniqueName: \"kubernetes.io/projected/209b5640-5db3-46dc-8dd1-1a302a385526-kube-api-access-j2k2k\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.088799 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/78af3cea-e4f1-4604-95f7-07db771f23b6-images\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.088815 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e03d4bbe-6cd7-4c00-8238-e036fdb68171-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zn4kv\" (UID: \"e03d4bbe-6cd7-4c00-8238-e036fdb68171\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.088836 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9453127-1a25-4598-9247-928891567163-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-28mln\" (UID: \"f9453127-1a25-4598-9247-928891567163\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.088868 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-client-ca\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.088884 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-trusted-ca\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.088902 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-bound-sa-token\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.088917 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78af3cea-e4f1-4604-95f7-07db771f23b6-config\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.088931 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-oauth-serving-cert\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089356 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/209b5640-5db3-46dc-8dd1-1a302a385526-console-serving-cert\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089421 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9453127-1a25-4598-9247-928891567163-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-28mln\" (UID: \"f9453127-1a25-4598-9247-928891567163\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089472 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/64178ade-8a25-4622-91a4-d09ae57fadf1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089498 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47834947-2c02-4560-8970-c3bedbfd9b40-trusted-ca\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 
13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089521 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rnzn\" (UniqueName: \"kubernetes.io/projected/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-kube-api-access-9rnzn\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: \"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089567 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/78af3cea-e4f1-4604-95f7-07db771f23b6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089594 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/686e2567-9406-45dd-a8f5-b1cadd2efda2-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wh4vt\" (UID: \"686e2567-9406-45dd-a8f5-b1cadd2efda2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089648 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-trusted-ca-bundle\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089671 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47834947-2c02-4560-8970-c3bedbfd9b40-serving-cert\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089694 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhl86\" (UniqueName: \"kubernetes.io/projected/e546cb09-56f5-4f26-aa9f-fb0cdbedcffe-kube-api-access-xhl86\") pod \"cluster-samples-operator-665b6dd947-mq99q\" (UID: \"e546cb09-56f5-4f26-aa9f-fb0cdbedcffe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089719 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: \"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089743 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-certificates\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089764 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zt4sk\" (UniqueName: \"kubernetes.io/projected/47834947-2c02-4560-8970-c3bedbfd9b40-kube-api-access-zt4sk\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089810 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bg57c\" (UniqueName: \"kubernetes.io/projected/b17e913c-22ff-4166-bad2-fc4b1f9c6155-kube-api-access-bg57c\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089844 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-tls\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089866 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99clg\" (UniqueName: \"kubernetes.io/projected/686e2567-9406-45dd-a8f5-b1cadd2efda2-kube-api-access-99clg\") pod \"openshift-config-operator-7777fb866f-wh4vt\" (UID: \"686e2567-9406-45dd-a8f5-b1cadd2efda2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089890 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/64178ade-8a25-4622-91a4-d09ae57fadf1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089912 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/209b5640-5db3-46dc-8dd1-1a302a385526-console-oauth-config\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089933 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e546cb09-56f5-4f26-aa9f-fb0cdbedcffe-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mq99q\" (UID: \"e546cb09-56f5-4f26-aa9f-fb0cdbedcffe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.089981 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: 
\"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090012 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-console-config\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090032 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-config\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090058 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e03d4bbe-6cd7-4c00-8238-e036fdb68171-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zn4kv\" (UID: \"e03d4bbe-6cd7-4c00-8238-e036fdb68171\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090099 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/686e2567-9406-45dd-a8f5-b1cadd2efda2-serving-cert\") pod \"openshift-config-operator-7777fb866f-wh4vt\" (UID: \"686e2567-9406-45dd-a8f5-b1cadd2efda2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090149 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6552x\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-kube-api-access-6552x\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090188 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e03d4bbe-6cd7-4c00-8238-e036fdb68171-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zn4kv\" (UID: \"e03d4bbe-6cd7-4c00-8238-e036fdb68171\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090229 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-service-ca\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090253 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: 
\"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090280 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v689\" (UniqueName: \"kubernetes.io/projected/f9453127-1a25-4598-9247-928891567163-kube-api-access-4v689\") pod \"kube-storage-version-migrator-operator-b67b599dd-28mln\" (UID: \"f9453127-1a25-4598-9247-928891567163\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090303 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqf42\" (UniqueName: \"kubernetes.io/projected/78af3cea-e4f1-4604-95f7-07db771f23b6-kube-api-access-wqf42\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090322 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47834947-2c02-4560-8970-c3bedbfd9b40-config\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.090357 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b17e913c-22ff-4166-bad2-fc4b1f9c6155-serving-cert\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.090425 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:39.590410398 +0000 UTC m=+144.201640961 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.097925 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.107451 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.114737 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jwh8g"] Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.121398 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.139803 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.159177 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.177772 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-84p49"] Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.178913 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.190136 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.190935 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.191127 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:39.691095473 +0000 UTC m=+144.302326026 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191235 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78af3cea-e4f1-4604-95f7-07db771f23b6-config\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191285 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-csi-data-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191360 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5719f6d0-51b0-42ac-8db3-c32bb2d95e19-metrics-tls\") pod \"dns-default-z8h96\" (UID: \"5719f6d0-51b0-42ac-8db3-c32bb2d95e19\") " pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191394 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjqtl\" (UniqueName: \"kubernetes.io/projected/e4272f28-ff3e-4543-95fc-322a430ef74a-kube-api-access-bjqtl\") pod \"machine-config-operator-74547568cd-975kj\" (UID: \"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191424 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/64178ade-8a25-4622-91a4-d09ae57fadf1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191451 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mvwn\" (UniqueName: \"kubernetes.io/projected/0c7fb422-f539-48c4-94c3-02602bb323d6-kube-api-access-5mvwn\") pod \"multus-admission-controller-857f4d67dd-g9699\" (UID: \"0c7fb422-f539-48c4-94c3-02602bb323d6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191478 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4f40f667-1ba4-4725-ae8b-326082ca9cba-profile-collector-cert\") pod \"catalog-operator-68c6474976-l66ck\" (UID: \"4f40f667-1ba4-4725-ae8b-326082ca9cba\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191518 4733 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/78af3cea-e4f1-4604-95f7-07db771f23b6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191549 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/686e2567-9406-45dd-a8f5-b1cadd2efda2-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wh4vt\" (UID: \"686e2567-9406-45dd-a8f5-b1cadd2efda2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191576 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbfz5\" (UniqueName: \"kubernetes.io/projected/421cb8d9-2bc0-42a2-9c80-7676cecda20b-kube-api-access-sbfz5\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: \"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191606 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/81e6cded-065e-4ed8-a97c-a55e128a4aaf-profile-collector-cert\") pod \"olm-operator-6b444d44fb-ztngj\" (UID: \"81e6cded-065e-4ed8-a97c-a55e128a4aaf\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191659 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-trusted-ca-bundle\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191728 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: \"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191794 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zt4sk\" (UniqueName: \"kubernetes.io/projected/47834947-2c02-4560-8970-c3bedbfd9b40-kube-api-access-zt4sk\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191817 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bg57c\" (UniqueName: \"kubernetes.io/projected/b17e913c-22ff-4166-bad2-fc4b1f9c6155-kube-api-access-bg57c\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191838 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/421cb8d9-2bc0-42a2-9c80-7676cecda20b-trusted-ca\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: \"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191858 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x75z\" (UniqueName: \"kubernetes.io/projected/e263441b-77f1-4f99-b1ba-419525173cf0-kube-api-access-8x75z\") pod \"collect-profiles-29426820-srlnz\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191882 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae37ee7f-716a-49c6-938c-30da6af9ba92-serving-cert\") pod \"service-ca-operator-777779d784-nlspd\" (UID: \"ae37ee7f-716a-49c6-938c-30da6af9ba92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191919 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-tls\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191940 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99clg\" (UniqueName: \"kubernetes.io/projected/686e2567-9406-45dd-a8f5-b1cadd2efda2-kube-api-access-99clg\") pod \"openshift-config-operator-7777fb866f-wh4vt\" (UID: \"686e2567-9406-45dd-a8f5-b1cadd2efda2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.191960 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/209b5640-5db3-46dc-8dd1-1a302a385526-console-oauth-config\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192012 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e546cb09-56f5-4f26-aa9f-fb0cdbedcffe-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mq99q\" (UID: \"e546cb09-56f5-4f26-aa9f-fb0cdbedcffe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192038 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/8b71bebb-d9b6-4d26-8a46-eb83bb713af5-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-t82l8\" (UID: \"8b71bebb-d9b6-4d26-8a46-eb83bb713af5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192074 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/686e2567-9406-45dd-a8f5-b1cadd2efda2-serving-cert\") pod \"openshift-config-operator-7777fb866f-wh4vt\" (UID: \"686e2567-9406-45dd-a8f5-b1cadd2efda2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192111 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a512f203-a0f8-4b45-b957-b19986851387-config\") pod \"kube-apiserver-operator-766d6c64bb-5dx2g\" (UID: \"a512f203-a0f8-4b45-b957-b19986851387\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192134 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5719f6d0-51b0-42ac-8db3-c32bb2d95e19-config-volume\") pod \"dns-default-z8h96\" (UID: \"5719f6d0-51b0-42ac-8db3-c32bb2d95e19\") " pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192158 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-socket-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192177 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-mountpoint-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192202 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47834947-2c02-4560-8970-c3bedbfd9b40-config\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192242 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/544cc374-976e-49c9-b1b6-2ff1df7e133d-etcd-service-ca\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192271 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fe8bce0-c7ce-454a-956f-f8c0fe428c02-config\") pod \"kube-controller-manager-operator-78b949d7b-5h24w\" (UID: \"7fe8bce0-c7ce-454a-956f-f8c0fe428c02\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192295 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b17e913c-22ff-4166-bad2-fc4b1f9c6155-serving-cert\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192320 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k5dwn\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192355 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/81e6cded-065e-4ed8-a97c-a55e128a4aaf-srv-cert\") pod \"olm-operator-6b444d44fb-ztngj\" (UID: \"81e6cded-065e-4ed8-a97c-a55e128a4aaf\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192384 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: \"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192405 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/421cb8d9-2bc0-42a2-9c80-7676cecda20b-metrics-tls\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: \"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192426 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgtjl\" (UniqueName: \"kubernetes.io/projected/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-kube-api-access-jgtjl\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192446 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-plugins-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192466 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e263441b-77f1-4f99-b1ba-419525173cf0-secret-volume\") pod \"collect-profiles-29426820-srlnz\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.192513 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/686e2567-9406-45dd-a8f5-b1cadd2efda2-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wh4vt\" (UID: \"686e2567-9406-45dd-a8f5-b1cadd2efda2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 
07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.193482 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/544cc374-976e-49c9-b1b6-2ff1df7e133d-etcd-ca\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.193736 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e03d4bbe-6cd7-4c00-8238-e036fdb68171-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zn4kv\" (UID: \"e03d4bbe-6cd7-4c00-8238-e036fdb68171\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.193772 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjntp\" (UniqueName: \"kubernetes.io/projected/ae37ee7f-716a-49c6-938c-30da6af9ba92-kube-api-access-rjntp\") pod \"service-ca-operator-777779d784-nlspd\" (UID: \"ae37ee7f-716a-49c6-938c-30da6af9ba92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.193995 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: \"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.194293 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f276c387-4478-4a04-abb8-6b3db599690a-node-bootstrap-token\") pod \"machine-config-server-l9qj7\" (UID: \"f276c387-4478-4a04-abb8-6b3db599690a\") " pod="openshift-machine-config-operator/machine-config-server-l9qj7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.194337 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e4272f28-ff3e-4543-95fc-322a430ef74a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-975kj\" (UID: \"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.194446 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt6mx\" (UniqueName: \"kubernetes.io/projected/5719f6d0-51b0-42ac-8db3-c32bb2d95e19-kube-api-access-mt6mx\") pod \"dns-default-z8h96\" (UID: \"5719f6d0-51b0-42ac-8db3-c32bb2d95e19\") " pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.194617 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-trusted-ca\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.194643 4733 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-client-ca\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.194667 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccv52\" (UniqueName: \"kubernetes.io/projected/544cc374-976e-49c9-b1b6-2ff1df7e133d-kube-api-access-ccv52\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.194734 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-registration-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.194780 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-bound-sa-token\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.194803 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-oauth-serving-cert\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.194838 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e263441b-77f1-4f99-b1ba-419525173cf0-config-volume\") pod \"collect-profiles-29426820-srlnz\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.195141 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/209b5640-5db3-46dc-8dd1-1a302a385526-console-serving-cert\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.195366 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1-cert\") pod \"ingress-canary-9p8cj\" (UID: \"1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1\") " pod="openshift-ingress-canary/ingress-canary-9p8cj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.195405 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9453127-1a25-4598-9247-928891567163-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-28mln\" (UID: 
\"f9453127-1a25-4598-9247-928891567163\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.195551 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/544cc374-976e-49c9-b1b6-2ff1df7e133d-serving-cert\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.195762 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-client-ca\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.196000 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-trusted-ca\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.196301 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b17e913c-22ff-4166-bad2-fc4b1f9c6155-serving-cert\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.198187 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.198649 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/686e2567-9406-45dd-a8f5-b1cadd2efda2-serving-cert\") pod \"openshift-config-operator-7777fb866f-wh4vt\" (UID: \"686e2567-9406-45dd-a8f5-b1cadd2efda2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.200318 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9453127-1a25-4598-9247-928891567163-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-28mln\" (UID: \"f9453127-1a25-4598-9247-928891567163\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.206750 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-oauth-serving-cert\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207429 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92cjs\" (UniqueName: \"kubernetes.io/projected/81e6cded-065e-4ed8-a97c-a55e128a4aaf-kube-api-access-92cjs\") pod 
\"olm-operator-6b444d44fb-ztngj\" (UID: \"81e6cded-065e-4ed8-a97c-a55e128a4aaf\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207496 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rnzn\" (UniqueName: \"kubernetes.io/projected/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-kube-api-access-9rnzn\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: \"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207547 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47834947-2c02-4560-8970-c3bedbfd9b40-trusted-ca\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207571 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a512f203-a0f8-4b45-b957-b19986851387-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5dx2g\" (UID: \"a512f203-a0f8-4b45-b957-b19986851387\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207593 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0c7fb422-f539-48c4-94c3-02602bb323d6-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-g9699\" (UID: \"0c7fb422-f539-48c4-94c3-02602bb323d6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207604 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/78af3cea-e4f1-4604-95f7-07db771f23b6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207678 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd887374-af2e-4eb9-9284-442a533e9d29-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-cl5ds\" (UID: \"bd887374-af2e-4eb9-9284-442a533e9d29\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207699 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpjsg\" (UniqueName: \"kubernetes.io/projected/1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1-kube-api-access-cpjsg\") pod \"ingress-canary-9p8cj\" (UID: \"1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1\") " pod="openshift-ingress-canary/ingress-canary-9p8cj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207719 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhl86\" (UniqueName: \"kubernetes.io/projected/e546cb09-56f5-4f26-aa9f-fb0cdbedcffe-kube-api-access-xhl86\") pod 
\"cluster-samples-operator-665b6dd947-mq99q\" (UID: \"e546cb09-56f5-4f26-aa9f-fb0cdbedcffe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207735 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbrl6\" (UniqueName: \"kubernetes.io/projected/146d0764-767d-402d-9538-64d5fee9d6c9-kube-api-access-zbrl6\") pod \"migrator-59844c95c7-pjqbf\" (UID: \"146d0764-767d-402d-9538-64d5fee9d6c9\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207751 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mhcn\" (UniqueName: \"kubernetes.io/projected/f276c387-4478-4a04-abb8-6b3db599690a-kube-api-access-5mhcn\") pod \"machine-config-server-l9qj7\" (UID: \"f276c387-4478-4a04-abb8-6b3db599690a\") " pod="openshift-machine-config-operator/machine-config-server-l9qj7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207773 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47834947-2c02-4560-8970-c3bedbfd9b40-serving-cert\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207814 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/544cc374-976e-49c9-b1b6-2ff1df7e133d-etcd-client\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207865 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-certificates\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207888 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e4272f28-ff3e-4543-95fc-322a430ef74a-proxy-tls\") pod \"machine-config-operator-74547568cd-975kj\" (UID: \"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207939 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a512f203-a0f8-4b45-b957-b19986851387-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5dx2g\" (UID: \"a512f203-a0f8-4b45-b957-b19986851387\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207993 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/64178ade-8a25-4622-91a4-d09ae57fadf1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-278t5\" (UID: 
\"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208012 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7fe8bce0-c7ce-454a-956f-f8c0fe428c02-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5h24w\" (UID: \"7fe8bce0-c7ce-454a-956f-f8c0fe428c02\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208031 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-config\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208052 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6ldd\" (UniqueName: \"kubernetes.io/projected/4f40f667-1ba4-4725-ae8b-326082ca9cba-kube-api-access-t6ldd\") pod \"catalog-operator-68c6474976-l66ck\" (UID: \"4f40f667-1ba4-4725-ae8b-326082ca9cba\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208080 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208098 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-console-config\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208124 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e03d4bbe-6cd7-4c00-8238-e036fdb68171-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zn4kv\" (UID: \"e03d4bbe-6cd7-4c00-8238-e036fdb68171\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208143 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbgh7\" (UniqueName: \"kubernetes.io/projected/8b71bebb-d9b6-4d26-8a46-eb83bb713af5-kube-api-access-dbgh7\") pod \"control-plane-machine-set-operator-78cbb6b69f-t82l8\" (UID: \"8b71bebb-d9b6-4d26-8a46-eb83bb713af5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208163 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e4272f28-ff3e-4543-95fc-322a430ef74a-images\") pod \"machine-config-operator-74547568cd-975kj\" (UID: 
\"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208190 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/822753a6-f2ae-42df-adc6-0cb5113d1385-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qcmlb\" (UID: \"822753a6-f2ae-42df-adc6-0cb5113d1385\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208225 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd887374-af2e-4eb9-9284-442a533e9d29-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-cl5ds\" (UID: \"bd887374-af2e-4eb9-9284-442a533e9d29\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208249 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e03d4bbe-6cd7-4c00-8238-e036fdb68171-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zn4kv\" (UID: \"e03d4bbe-6cd7-4c00-8238-e036fdb68171\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208271 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/421cb8d9-2bc0-42a2-9c80-7676cecda20b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: \"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208292 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fe8bce0-c7ce-454a-956f-f8c0fe428c02-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5h24w\" (UID: \"7fe8bce0-c7ce-454a-956f-f8c0fe428c02\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208345 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6552x\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-kube-api-access-6552x\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208383 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v689\" (UniqueName: \"kubernetes.io/projected/f9453127-1a25-4598-9247-928891567163-kube-api-access-4v689\") pod \"kube-storage-version-migrator-operator-b67b599dd-28mln\" (UID: \"f9453127-1a25-4598-9247-928891567163\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208404 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-service-ca\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208426 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: \"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208447 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f276c387-4478-4a04-abb8-6b3db599690a-certs\") pod \"machine-config-server-l9qj7\" (UID: \"f276c387-4478-4a04-abb8-6b3db599690a\") " pod="openshift-machine-config-operator/machine-config-server-l9qj7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208510 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqf42\" (UniqueName: \"kubernetes.io/projected/78af3cea-e4f1-4604-95f7-07db771f23b6-kube-api-access-wqf42\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208533 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg9kc\" (UniqueName: \"kubernetes.io/projected/bd887374-af2e-4eb9-9284-442a533e9d29-kube-api-access-rg9kc\") pod \"openshift-controller-manager-operator-756b6f6bc6-cl5ds\" (UID: \"bd887374-af2e-4eb9-9284-442a533e9d29\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208594 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/64178ade-8a25-4622-91a4-d09ae57fadf1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208623 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnxr5\" (UniqueName: \"kubernetes.io/projected/822753a6-f2ae-42df-adc6-0cb5113d1385-kube-api-access-qnxr5\") pod \"package-server-manager-789f6589d5-qcmlb\" (UID: \"822753a6-f2ae-42df-adc6-0cb5113d1385\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208664 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k5dwn\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.207599 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-tls\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.208807 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e546cb09-56f5-4f26-aa9f-fb0cdbedcffe-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mq99q\" (UID: \"e546cb09-56f5-4f26-aa9f-fb0cdbedcffe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.209275 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:39.709260284 +0000 UTC m=+144.320490767 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.209519 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2k2k\" (UniqueName: \"kubernetes.io/projected/209b5640-5db3-46dc-8dd1-1a302a385526-kube-api-access-j2k2k\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.209841 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e03d4bbe-6cd7-4c00-8238-e036fdb68171-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zn4kv\" (UID: \"e03d4bbe-6cd7-4c00-8238-e036fdb68171\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.210002 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-certificates\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.210395 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/544cc374-976e-49c9-b1b6-2ff1df7e133d-config\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.210463 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vcm8\" (UniqueName: \"kubernetes.io/projected/404840cf-dd63-4b5f-bde2-49af2c7ace66-kube-api-access-9vcm8\") pod \"marketplace-operator-79b997595-k5dwn\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.210504 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/78af3cea-e4f1-4604-95f7-07db771f23b6-images\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.210543 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9453127-1a25-4598-9247-928891567163-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-28mln\" (UID: \"f9453127-1a25-4598-9247-928891567163\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.210573 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae37ee7f-716a-49c6-938c-30da6af9ba92-config\") pod \"service-ca-operator-777779d784-nlspd\" (UID: \"ae37ee7f-716a-49c6-938c-30da6af9ba92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.210622 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4f40f667-1ba4-4725-ae8b-326082ca9cba-srv-cert\") pod \"catalog-operator-68c6474976-l66ck\" (UID: \"4f40f667-1ba4-4725-ae8b-326082ca9cba\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.211348 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-config\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.211580 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/64178ade-8a25-4622-91a4-d09ae57fadf1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.211680 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9453127-1a25-4598-9247-928891567163-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-28mln\" (UID: \"f9453127-1a25-4598-9247-928891567163\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.211957 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e03d4bbe-6cd7-4c00-8238-e036fdb68171-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zn4kv\" (UID: \"e03d4bbe-6cd7-4c00-8238-e036fdb68171\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" Dec 13 07:06:39 crc 
kubenswrapper[4733]: I1213 07:06:39.213186 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: \"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.218256 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.237923 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.244786 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.246911 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/209b5640-5db3-46dc-8dd1-1a302a385526-console-oauth-config\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.259183 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.262668 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78af3cea-e4f1-4604-95f7-07db771f23b6-config\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.278673 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.305947 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.311517 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.311596 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:39.811576689 +0000 UTC m=+144.422807172 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.311707 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccv52\" (UniqueName: \"kubernetes.io/projected/544cc374-976e-49c9-b1b6-2ff1df7e133d-kube-api-access-ccv52\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.311743 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-registration-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.311783 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e263441b-77f1-4f99-b1ba-419525173cf0-config-volume\") pod \"collect-profiles-29426820-srlnz\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.311803 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1-cert\") pod \"ingress-canary-9p8cj\" (UID: \"1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1\") " pod="openshift-ingress-canary/ingress-canary-9p8cj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.311826 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/544cc374-976e-49c9-b1b6-2ff1df7e133d-serving-cert\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.311847 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92cjs\" (UniqueName: \"kubernetes.io/projected/81e6cded-065e-4ed8-a97c-a55e128a4aaf-kube-api-access-92cjs\") pod \"olm-operator-6b444d44fb-ztngj\" (UID: \"81e6cded-065e-4ed8-a97c-a55e128a4aaf\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.311896 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a512f203-a0f8-4b45-b957-b19986851387-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5dx2g\" (UID: \"a512f203-a0f8-4b45-b957-b19986851387\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.311918 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/0c7fb422-f539-48c4-94c3-02602bb323d6-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-g9699\" (UID: \"0c7fb422-f539-48c4-94c3-02602bb323d6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.311946 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd887374-af2e-4eb9-9284-442a533e9d29-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-cl5ds\" (UID: \"bd887374-af2e-4eb9-9284-442a533e9d29\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.311966 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpjsg\" (UniqueName: \"kubernetes.io/projected/1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1-kube-api-access-cpjsg\") pod \"ingress-canary-9p8cj\" (UID: \"1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1\") " pod="openshift-ingress-canary/ingress-canary-9p8cj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312030 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbrl6\" (UniqueName: \"kubernetes.io/projected/146d0764-767d-402d-9538-64d5fee9d6c9-kube-api-access-zbrl6\") pod \"migrator-59844c95c7-pjqbf\" (UID: \"146d0764-767d-402d-9538-64d5fee9d6c9\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312051 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mhcn\" (UniqueName: \"kubernetes.io/projected/f276c387-4478-4a04-abb8-6b3db599690a-kube-api-access-5mhcn\") pod \"machine-config-server-l9qj7\" (UID: \"f276c387-4478-4a04-abb8-6b3db599690a\") " pod="openshift-machine-config-operator/machine-config-server-l9qj7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312072 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/544cc374-976e-49c9-b1b6-2ff1df7e133d-etcd-client\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312092 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e4272f28-ff3e-4543-95fc-322a430ef74a-proxy-tls\") pod \"machine-config-operator-74547568cd-975kj\" (UID: \"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312147 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-registration-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312172 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a512f203-a0f8-4b45-b957-b19986851387-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5dx2g\" (UID: \"a512f203-a0f8-4b45-b957-b19986851387\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312199 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7fe8bce0-c7ce-454a-956f-f8c0fe428c02-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5h24w\" (UID: \"7fe8bce0-c7ce-454a-956f-f8c0fe428c02\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312258 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312296 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6ldd\" (UniqueName: \"kubernetes.io/projected/4f40f667-1ba4-4725-ae8b-326082ca9cba-kube-api-access-t6ldd\") pod \"catalog-operator-68c6474976-l66ck\" (UID: \"4f40f667-1ba4-4725-ae8b-326082ca9cba\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312323 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbgh7\" (UniqueName: \"kubernetes.io/projected/8b71bebb-d9b6-4d26-8a46-eb83bb713af5-kube-api-access-dbgh7\") pod \"control-plane-machine-set-operator-78cbb6b69f-t82l8\" (UID: \"8b71bebb-d9b6-4d26-8a46-eb83bb713af5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312347 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e4272f28-ff3e-4543-95fc-322a430ef74a-images\") pod \"machine-config-operator-74547568cd-975kj\" (UID: \"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312369 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/822753a6-f2ae-42df-adc6-0cb5113d1385-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qcmlb\" (UID: \"822753a6-f2ae-42df-adc6-0cb5113d1385\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312390 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd887374-af2e-4eb9-9284-442a533e9d29-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-cl5ds\" (UID: \"bd887374-af2e-4eb9-9284-442a533e9d29\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312418 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/421cb8d9-2bc0-42a2-9c80-7676cecda20b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: 
\"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312438 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fe8bce0-c7ce-454a-956f-f8c0fe428c02-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5h24w\" (UID: \"7fe8bce0-c7ce-454a-956f-f8c0fe428c02\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312475 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f276c387-4478-4a04-abb8-6b3db599690a-certs\") pod \"machine-config-server-l9qj7\" (UID: \"f276c387-4478-4a04-abb8-6b3db599690a\") " pod="openshift-machine-config-operator/machine-config-server-l9qj7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312523 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg9kc\" (UniqueName: \"kubernetes.io/projected/bd887374-af2e-4eb9-9284-442a533e9d29-kube-api-access-rg9kc\") pod \"openshift-controller-manager-operator-756b6f6bc6-cl5ds\" (UID: \"bd887374-af2e-4eb9-9284-442a533e9d29\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312546 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnxr5\" (UniqueName: \"kubernetes.io/projected/822753a6-f2ae-42df-adc6-0cb5113d1385-kube-api-access-qnxr5\") pod \"package-server-manager-789f6589d5-qcmlb\" (UID: \"822753a6-f2ae-42df-adc6-0cb5113d1385\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312568 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k5dwn\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312605 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/544cc374-976e-49c9-b1b6-2ff1df7e133d-config\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312627 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vcm8\" (UniqueName: \"kubernetes.io/projected/404840cf-dd63-4b5f-bde2-49af2c7ace66-kube-api-access-9vcm8\") pod \"marketplace-operator-79b997595-k5dwn\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312659 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae37ee7f-716a-49c6-938c-30da6af9ba92-config\") pod \"service-ca-operator-777779d784-nlspd\" (UID: \"ae37ee7f-716a-49c6-938c-30da6af9ba92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" 
Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312683 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4f40f667-1ba4-4725-ae8b-326082ca9cba-srv-cert\") pod \"catalog-operator-68c6474976-l66ck\" (UID: \"4f40f667-1ba4-4725-ae8b-326082ca9cba\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312710 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-csi-data-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312733 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5719f6d0-51b0-42ac-8db3-c32bb2d95e19-metrics-tls\") pod \"dns-default-z8h96\" (UID: \"5719f6d0-51b0-42ac-8db3-c32bb2d95e19\") " pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312752 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjqtl\" (UniqueName: \"kubernetes.io/projected/e4272f28-ff3e-4543-95fc-322a430ef74a-kube-api-access-bjqtl\") pod \"machine-config-operator-74547568cd-975kj\" (UID: \"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312769 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mvwn\" (UniqueName: \"kubernetes.io/projected/0c7fb422-f539-48c4-94c3-02602bb323d6-kube-api-access-5mvwn\") pod \"multus-admission-controller-857f4d67dd-g9699\" (UID: \"0c7fb422-f539-48c4-94c3-02602bb323d6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312785 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbfz5\" (UniqueName: \"kubernetes.io/projected/421cb8d9-2bc0-42a2-9c80-7676cecda20b-kube-api-access-sbfz5\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: \"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312799 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4f40f667-1ba4-4725-ae8b-326082ca9cba-profile-collector-cert\") pod \"catalog-operator-68c6474976-l66ck\" (UID: \"4f40f667-1ba4-4725-ae8b-326082ca9cba\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312816 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/81e6cded-065e-4ed8-a97c-a55e128a4aaf-profile-collector-cert\") pod \"olm-operator-6b444d44fb-ztngj\" (UID: \"81e6cded-065e-4ed8-a97c-a55e128a4aaf\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312849 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/421cb8d9-2bc0-42a2-9c80-7676cecda20b-trusted-ca\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: \"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312864 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x75z\" (UniqueName: \"kubernetes.io/projected/e263441b-77f1-4f99-b1ba-419525173cf0-kube-api-access-8x75z\") pod \"collect-profiles-29426820-srlnz\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312887 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae37ee7f-716a-49c6-938c-30da6af9ba92-serving-cert\") pod \"service-ca-operator-777779d784-nlspd\" (UID: \"ae37ee7f-716a-49c6-938c-30da6af9ba92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312906 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/8b71bebb-d9b6-4d26-8a46-eb83bb713af5-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-t82l8\" (UID: \"8b71bebb-d9b6-4d26-8a46-eb83bb713af5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312924 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a512f203-a0f8-4b45-b957-b19986851387-config\") pod \"kube-apiserver-operator-766d6c64bb-5dx2g\" (UID: \"a512f203-a0f8-4b45-b957-b19986851387\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312937 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5719f6d0-51b0-42ac-8db3-c32bb2d95e19-config-volume\") pod \"dns-default-z8h96\" (UID: \"5719f6d0-51b0-42ac-8db3-c32bb2d95e19\") " pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312953 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-socket-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312953 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-csi-data-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.312974 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-mountpoint-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc 
kubenswrapper[4733]: I1213 07:06:39.313015 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/544cc374-976e-49c9-b1b6-2ff1df7e133d-etcd-service-ca\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313037 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fe8bce0-c7ce-454a-956f-f8c0fe428c02-config\") pod \"kube-controller-manager-operator-78b949d7b-5h24w\" (UID: \"7fe8bce0-c7ce-454a-956f-f8c0fe428c02\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313061 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k5dwn\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313091 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/421cb8d9-2bc0-42a2-9c80-7676cecda20b-metrics-tls\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: \"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313112 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgtjl\" (UniqueName: \"kubernetes.io/projected/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-kube-api-access-jgtjl\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313133 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/81e6cded-065e-4ed8-a97c-a55e128a4aaf-srv-cert\") pod \"olm-operator-6b444d44fb-ztngj\" (UID: \"81e6cded-065e-4ed8-a97c-a55e128a4aaf\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313162 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/544cc374-976e-49c9-b1b6-2ff1df7e133d-etcd-ca\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313181 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-plugins-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313203 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e263441b-77f1-4f99-b1ba-419525173cf0-secret-volume\") pod 
\"collect-profiles-29426820-srlnz\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313228 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e263441b-77f1-4f99-b1ba-419525173cf0-config-volume\") pod \"collect-profiles-29426820-srlnz\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313254 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjntp\" (UniqueName: \"kubernetes.io/projected/ae37ee7f-716a-49c6-938c-30da6af9ba92-kube-api-access-rjntp\") pod \"service-ca-operator-777779d784-nlspd\" (UID: \"ae37ee7f-716a-49c6-938c-30da6af9ba92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313456 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f276c387-4478-4a04-abb8-6b3db599690a-node-bootstrap-token\") pod \"machine-config-server-l9qj7\" (UID: \"f276c387-4478-4a04-abb8-6b3db599690a\") " pod="openshift-machine-config-operator/machine-config-server-l9qj7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313485 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e4272f28-ff3e-4543-95fc-322a430ef74a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-975kj\" (UID: \"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.313525 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt6mx\" (UniqueName: \"kubernetes.io/projected/5719f6d0-51b0-42ac-8db3-c32bb2d95e19-kube-api-access-mt6mx\") pod \"dns-default-z8h96\" (UID: \"5719f6d0-51b0-42ac-8db3-c32bb2d95e19\") " pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.314407 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae37ee7f-716a-49c6-938c-30da6af9ba92-config\") pod \"service-ca-operator-777779d784-nlspd\" (UID: \"ae37ee7f-716a-49c6-938c-30da6af9ba92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.315972 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a512f203-a0f8-4b45-b957-b19986851387-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5dx2g\" (UID: \"a512f203-a0f8-4b45-b957-b19986851387\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.315972 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/544cc374-976e-49c9-b1b6-2ff1df7e133d-etcd-client\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.316373 4733 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/544cc374-976e-49c9-b1b6-2ff1df7e133d-serving-cert\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.316564 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:39.81655051 +0000 UTC m=+144.427780983 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.317062 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0c7fb422-f539-48c4-94c3-02602bb323d6-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-g9699\" (UID: \"0c7fb422-f539-48c4-94c3-02602bb323d6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.317285 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f276c387-4478-4a04-abb8-6b3db599690a-node-bootstrap-token\") pod \"machine-config-server-l9qj7\" (UID: \"f276c387-4478-4a04-abb8-6b3db599690a\") " pod="openshift-machine-config-operator/machine-config-server-l9qj7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.318229 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e4272f28-ff3e-4543-95fc-322a430ef74a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-975kj\" (UID: \"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.318699 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e4272f28-ff3e-4543-95fc-322a430ef74a-proxy-tls\") pod \"machine-config-operator-74547568cd-975kj\" (UID: \"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.319382 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd887374-af2e-4eb9-9284-442a533e9d29-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-cl5ds\" (UID: \"bd887374-af2e-4eb9-9284-442a533e9d29\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.319775 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.321594 4733 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/81e6cded-065e-4ed8-a97c-a55e128a4aaf-srv-cert\") pod \"olm-operator-6b444d44fb-ztngj\" (UID: \"81e6cded-065e-4ed8-a97c-a55e128a4aaf\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.322091 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/421cb8d9-2bc0-42a2-9c80-7676cecda20b-metrics-tls\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: \"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.322200 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-socket-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.322297 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-mountpoint-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.322591 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae37ee7f-716a-49c6-938c-30da6af9ba92-serving-cert\") pod \"service-ca-operator-777779d784-nlspd\" (UID: \"ae37ee7f-716a-49c6-938c-30da6af9ba92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.322722 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fe8bce0-c7ce-454a-956f-f8c0fe428c02-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5h24w\" (UID: \"7fe8bce0-c7ce-454a-956f-f8c0fe428c02\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.322836 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/421cb8d9-2bc0-42a2-9c80-7676cecda20b-trusted-ca\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: \"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.324273 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a512f203-a0f8-4b45-b957-b19986851387-config\") pod \"kube-apiserver-operator-766d6c64bb-5dx2g\" (UID: \"a512f203-a0f8-4b45-b957-b19986851387\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.324661 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/822753a6-f2ae-42df-adc6-0cb5113d1385-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qcmlb\" (UID: \"822753a6-f2ae-42df-adc6-0cb5113d1385\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.325515 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4f40f667-1ba4-4725-ae8b-326082ca9cba-profile-collector-cert\") pod \"catalog-operator-68c6474976-l66ck\" (UID: \"4f40f667-1ba4-4725-ae8b-326082ca9cba\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.325610 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fe8bce0-c7ce-454a-956f-f8c0fe428c02-config\") pod \"kube-controller-manager-operator-78b949d7b-5h24w\" (UID: \"7fe8bce0-c7ce-454a-956f-f8c0fe428c02\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.325705 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f276c387-4478-4a04-abb8-6b3db599690a-certs\") pod \"machine-config-server-l9qj7\" (UID: \"f276c387-4478-4a04-abb8-6b3db599690a\") " pod="openshift-machine-config-operator/machine-config-server-l9qj7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.325789 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-plugins-dir\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.326115 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5719f6d0-51b0-42ac-8db3-c32bb2d95e19-metrics-tls\") pod \"dns-default-z8h96\" (UID: \"5719f6d0-51b0-42ac-8db3-c32bb2d95e19\") " pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.326552 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4f40f667-1ba4-4725-ae8b-326082ca9cba-srv-cert\") pod \"catalog-operator-68c6474976-l66ck\" (UID: \"4f40f667-1ba4-4725-ae8b-326082ca9cba\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.326780 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e263441b-77f1-4f99-b1ba-419525173cf0-secret-volume\") pod \"collect-profiles-29426820-srlnz\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.326942 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/81e6cded-065e-4ed8-a97c-a55e128a4aaf-profile-collector-cert\") pod \"olm-operator-6b444d44fb-ztngj\" (UID: \"81e6cded-065e-4ed8-a97c-a55e128a4aaf\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.327574 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd887374-af2e-4eb9-9284-442a533e9d29-serving-cert\") pod 
\"openshift-controller-manager-operator-756b6f6bc6-cl5ds\" (UID: \"bd887374-af2e-4eb9-9284-442a533e9d29\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.327721 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5719f6d0-51b0-42ac-8db3-c32bb2d95e19-config-volume\") pod \"dns-default-z8h96\" (UID: \"5719f6d0-51b0-42ac-8db3-c32bb2d95e19\") " pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.327977 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1-cert\") pod \"ingress-canary-9p8cj\" (UID: \"1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1\") " pod="openshift-ingress-canary/ingress-canary-9p8cj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.330985 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k5dwn\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.339090 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.341611 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/78af3cea-e4f1-4604-95f7-07db771f23b6-images\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.358903 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.379056 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.389120 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/209b5640-5db3-46dc-8dd1-1a302a385526-console-serving-cert\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.398686 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.407391 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlbjk\" (UniqueName: \"kubernetes.io/projected/02e53589-ca3a-41cc-be79-94b5453bf411-kube-api-access-nlbjk\") pod \"downloads-7954f5f757-r59jx\" (UID: \"02e53589-ca3a-41cc-be79-94b5453bf411\") " pod="openshift-console/downloads-7954f5f757-r59jx" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.414660 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.414895 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:39.914862133 +0000 UTC m=+144.526092616 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.415101 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.416641 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:39.916603386 +0000 UTC m=+144.527833949 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.417582 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" event={"ID":"114a8849-0be4-4fde-ada0-9d8ebf304ede","Type":"ContainerStarted","Data":"25f343c7b51420cb1ba163bde815f05eee812e99e08e5b02b5a0b7bf8e087e88"} Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.418444 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.418846 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg"] Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.420135 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" event={"ID":"3b51be9d-879f-441e-bb5e-baae8f6c8378","Type":"ContainerStarted","Data":"4c21523a392bd82b4d6221976df0689e17365a90ec7c2563b86cb1eff1d7d07c"} Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.420164 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m"] Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.420386 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-console-config\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.421188 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" event={"ID":"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77","Type":"ContainerStarted","Data":"f164e279223be44e1310499b81b13db5910c9258ef61c47fd0edde9b39fca852"} Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.422041 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" event={"ID":"351d859b-8f7e-4adc-9a55-10615cd158fc","Type":"ContainerStarted","Data":"8d50ca0377181546534ec048c33b8da7b6f995bdb5f7d9b91ab4b5ad1af4e3ee"} Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.438040 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.440038 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-service-ca\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.457901 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.462686 4733 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1412595e-0064-4407-89a2-51b8e97f49d7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-n66g9\" (UID: \"1412595e-0064-4407-89a2-51b8e97f49d7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.478837 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.486486 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.490722 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-r59jx" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.498877 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.516544 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.516716 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:40.016693163 +0000 UTC m=+144.627923696 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.517027 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.517423 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:40.017405115 +0000 UTC m=+144.628635658 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.523102 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.536782 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e4272f28-ff3e-4543-95fc-322a430ef74a-images\") pod \"machine-config-operator-74547568cd-975kj\" (UID: \"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.537716 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/544cc374-976e-49c9-b1b6-2ff1df7e133d-config\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.537935 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/209b5640-5db3-46dc-8dd1-1a302a385526-trusted-ca-bundle\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.538136 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47834947-2c02-4560-8970-c3bedbfd9b40-trusted-ca\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.538162 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47834947-2c02-4560-8970-c3bedbfd9b40-config\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.538447 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/544cc374-976e-49c9-b1b6-2ff1df7e133d-etcd-service-ca\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.538628 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/544cc374-976e-49c9-b1b6-2ff1df7e133d-etcd-ca\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.538912 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k5dwn\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.539430 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/8b71bebb-d9b6-4d26-8a46-eb83bb713af5-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-t82l8\" (UID: \"8b71bebb-d9b6-4d26-8a46-eb83bb713af5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.540314 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 13 07:06:39 crc kubenswrapper[4733]: W1213 07:06:39.543818 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1ccefcd_4ad4_4011_82f8_1236dc6e4b1a.slice/crio-f56638a3bb8770d674a91c0fe986b7893695837162fcb29fc14ad6881429df6b WatchSource:0}: Error finding container f56638a3bb8770d674a91c0fe986b7893695837162fcb29fc14ad6881429df6b: Status 404 returned error can't find the container with id f56638a3bb8770d674a91c0fe986b7893695837162fcb29fc14ad6881429df6b Dec 13 07:06:39 crc kubenswrapper[4733]: W1213 07:06:39.545325 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda41e126a_492d_4340_bb48_772b1364fd25.slice/crio-3c189e12cfcdff12dd0e108098d1946b9e48448f2511897ec4d386ca946bbfd2 WatchSource:0}: Error finding container 3c189e12cfcdff12dd0e108098d1946b9e48448f2511897ec4d386ca946bbfd2: Status 404 returned error can't find the container with id 3c189e12cfcdff12dd0e108098d1946b9e48448f2511897ec4d386ca946bbfd2 Dec 13 07:06:39 crc kubenswrapper[4733]: W1213 07:06:39.546367 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8abbbbe_5215_4443_8824_dec6f2a1c89e.slice/crio-4844c428e0a9a57a6ea4dd664c3fe7bc21eb72016b8bcaa4b95276426ee1e541 WatchSource:0}: Error finding container 4844c428e0a9a57a6ea4dd664c3fe7bc21eb72016b8bcaa4b95276426ee1e541: Status 404 returned error can't find the container with id 4844c428e0a9a57a6ea4dd664c3fe7bc21eb72016b8bcaa4b95276426ee1e541 Dec 13 07:06:39 crc kubenswrapper[4733]: W1213 07:06:39.547827 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5eb015c_f55a_4ae3_9c01_63c1f5f167b8.slice/crio-86163f0616e7aed404e0562be76fb0790420a87d6d79cb20ccf13e50e2f55fa8 WatchSource:0}: Error finding container 86163f0616e7aed404e0562be76fb0790420a87d6d79cb20ccf13e50e2f55fa8: Status 404 returned error can't find the container with id 86163f0616e7aed404e0562be76fb0790420a87d6d79cb20ccf13e50e2f55fa8 Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.558436 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.578745 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.583403 4733 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47834947-2c02-4560-8970-c3bedbfd9b40-serving-cert\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.618179 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.618792 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:40.118773181 +0000 UTC m=+144.730003664 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.636719 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zt4sk\" (UniqueName: \"kubernetes.io/projected/47834947-2c02-4560-8970-c3bedbfd9b40-kube-api-access-zt4sk\") pod \"console-operator-58897d9998-sxsq6\" (UID: \"47834947-2c02-4560-8970-c3bedbfd9b40\") " pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.653382 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99clg\" (UniqueName: \"kubernetes.io/projected/686e2567-9406-45dd-a8f5-b1cadd2efda2-kube-api-access-99clg\") pod \"openshift-config-operator-7777fb866f-wh4vt\" (UID: \"686e2567-9406-45dd-a8f5-b1cadd2efda2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.679118 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bg57c\" (UniqueName: \"kubernetes.io/projected/b17e913c-22ff-4166-bad2-fc4b1f9c6155-kube-api-access-bg57c\") pod \"route-controller-manager-6576b87f9c-4frbn\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.694256 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: \"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.720770 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.721160 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:40.221145597 +0000 UTC m=+144.832376080 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.727037 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e03d4bbe-6cd7-4c00-8238-e036fdb68171-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zn4kv\" (UID: \"e03d4bbe-6cd7-4c00-8238-e036fdb68171\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.763692 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.765479 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.791172 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-bound-sa-token\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.794573 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rnzn\" (UniqueName: \"kubernetes.io/projected/4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c-kube-api-access-9rnzn\") pod \"cluster-image-registry-operator-dc59b4c8b-6f4n7\" (UID: \"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.795649 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6552x\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-kube-api-access-6552x\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.796046 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqf42\" (UniqueName: \"kubernetes.io/projected/78af3cea-e4f1-4604-95f7-07db771f23b6-kube-api-access-wqf42\") pod \"machine-api-operator-5694c8668f-lbkq2\" (UID: \"78af3cea-e4f1-4604-95f7-07db771f23b6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.819392 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhl86\" (UniqueName: \"kubernetes.io/projected/e546cb09-56f5-4f26-aa9f-fb0cdbedcffe-kube-api-access-xhl86\") pod \"cluster-samples-operator-665b6dd947-mq99q\" (UID: \"e546cb09-56f5-4f26-aa9f-fb0cdbedcffe\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.821828 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.822407 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:40.322389419 +0000 UTC m=+144.933619902 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.835911 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2k2k\" (UniqueName: \"kubernetes.io/projected/209b5640-5db3-46dc-8dd1-1a302a385526-kube-api-access-j2k2k\") pod \"console-f9d7485db-z4czb\" (UID: \"209b5640-5db3-46dc-8dd1-1a302a385526\") " pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.859302 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.860714 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v689\" (UniqueName: \"kubernetes.io/projected/f9453127-1a25-4598-9247-928891567163-kube-api-access-4v689\") pod \"kube-storage-version-migrator-operator-b67b599dd-28mln\" (UID: \"f9453127-1a25-4598-9247-928891567163\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.863911 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9"] Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.865341 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-r59jx"] Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.872364 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccv52\" (UniqueName: \"kubernetes.io/projected/544cc374-976e-49c9-b1b6-2ff1df7e133d-kube-api-access-ccv52\") pod \"etcd-operator-b45778765-cbq4r\" (UID: \"544cc374-976e-49c9-b1b6-2ff1df7e133d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.891361 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/421cb8d9-2bc0-42a2-9c80-7676cecda20b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: \"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.901314 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.918754 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vcm8\" (UniqueName: \"kubernetes.io/projected/404840cf-dd63-4b5f-bde2-49af2c7ace66-kube-api-access-9vcm8\") pod \"marketplace-operator-79b997595-k5dwn\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.924406 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:39 crc kubenswrapper[4733]: E1213 07:06:39.925850 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:40.425824818 +0000 UTC m=+145.037055341 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.926385 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xqvh8"] Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.935073 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.937894 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjntp\" (UniqueName: \"kubernetes.io/projected/ae37ee7f-716a-49c6-938c-30da6af9ba92-kube-api-access-rjntp\") pod \"service-ca-operator-777779d784-nlspd\" (UID: \"ae37ee7f-716a-49c6-938c-30da6af9ba92\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" Dec 13 07:06:39 crc kubenswrapper[4733]: W1213 07:06:39.943175 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02e53589_ca3a_41cc_be79_94b5453bf411.slice/crio-d93174ecfb9d6901dddf5d93b854d20c334807427a6d16fe45a7e57371c6e412 WatchSource:0}: Error finding container d93174ecfb9d6901dddf5d93b854d20c334807427a6d16fe45a7e57371c6e412: Status 404 returned error can't find the container with id d93174ecfb9d6901dddf5d93b854d20c334807427a6d16fe45a7e57371c6e412 Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.946139 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.957137 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt6mx\" (UniqueName: \"kubernetes.io/projected/5719f6d0-51b0-42ac-8db3-c32bb2d95e19-kube-api-access-mt6mx\") pod \"dns-default-z8h96\" (UID: \"5719f6d0-51b0-42ac-8db3-c32bb2d95e19\") " pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.969860 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd"] Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.973043 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpjsg\" (UniqueName: \"kubernetes.io/projected/1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1-kube-api-access-cpjsg\") pod \"ingress-canary-9p8cj\" (UID: \"1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1\") " pod="openshift-ingress-canary/ingress-canary-9p8cj" Dec 13 07:06:39 crc kubenswrapper[4733]: I1213 07:06:39.991686 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbrl6\" (UniqueName: \"kubernetes.io/projected/146d0764-767d-402d-9538-64d5fee9d6c9-kube-api-access-zbrl6\") pod \"migrator-59844c95c7-pjqbf\" (UID: \"146d0764-767d-402d-9538-64d5fee9d6c9\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.014579 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.024865 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:40 crc kubenswrapper[4733]: E1213 07:06:40.025442 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:40.52542318 +0000 UTC m=+145.136653663 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.032281 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92cjs\" (UniqueName: \"kubernetes.io/projected/81e6cded-065e-4ed8-a97c-a55e128a4aaf-kube-api-access-92cjs\") pod \"olm-operator-6b444d44fb-ztngj\" (UID: \"81e6cded-065e-4ed8-a97c-a55e128a4aaf\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.033918 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mhcn\" (UniqueName: \"kubernetes.io/projected/f276c387-4478-4a04-abb8-6b3db599690a-kube-api-access-5mhcn\") pod \"machine-config-server-l9qj7\" (UID: \"f276c387-4478-4a04-abb8-6b3db599690a\") " pod="openshift-machine-config-operator/machine-config-server-l9qj7" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.053071 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.055497 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6ldd\" (UniqueName: \"kubernetes.io/projected/4f40f667-1ba4-4725-ae8b-326082ca9cba-kube-api-access-t6ldd\") pod \"catalog-operator-68c6474976-l66ck\" (UID: \"4f40f667-1ba4-4725-ae8b-326082ca9cba\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.062745 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.076726 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a512f203-a0f8-4b45-b957-b19986851387-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5dx2g\" (UID: \"a512f203-a0f8-4b45-b957-b19986851387\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.094070 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7fe8bce0-c7ce-454a-956f-f8c0fe428c02-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5h24w\" (UID: \"7fe8bce0-c7ce-454a-956f-f8c0fe428c02\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.097358 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.103680 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.110496 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.114597 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjqtl\" (UniqueName: \"kubernetes.io/projected/e4272f28-ff3e-4543-95fc-322a430ef74a-kube-api-access-bjqtl\") pod \"machine-config-operator-74547568cd-975kj\" (UID: \"e4272f28-ff3e-4543-95fc-322a430ef74a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.126805 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:40 crc kubenswrapper[4733]: E1213 07:06:40.127253 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:40.62723729 +0000 UTC m=+145.238467773 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.151503 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.163173 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.177117 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.186276 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.243037 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.243394 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg9kc\" (UniqueName: \"kubernetes.io/projected/bd887374-af2e-4eb9-9284-442a533e9d29-kube-api-access-rg9kc\") pod \"openshift-controller-manager-operator-756b6f6bc6-cl5ds\" (UID: \"bd887374-af2e-4eb9-9284-442a533e9d29\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" Dec 13 07:06:40 crc kubenswrapper[4733]: E1213 07:06:40.244336 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:40.744319183 +0000 UTC m=+145.355549666 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.244751 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.244800 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgtjl\" (UniqueName: \"kubernetes.io/projected/45abc2f0-f3bd-462e-8b4a-9c6ed7c30995-kube-api-access-jgtjl\") pod \"csi-hostpathplugin-8sw6n\" (UID: \"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995\") " pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.245106 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.245491 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.245792 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-l9qj7" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.258110 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbgh7\" (UniqueName: \"kubernetes.io/projected/8b71bebb-d9b6-4d26-8a46-eb83bb713af5-kube-api-access-dbgh7\") pod \"control-plane-machine-set-operator-78cbb6b69f-t82l8\" (UID: \"8b71bebb-d9b6-4d26-8a46-eb83bb713af5\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.258744 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbfz5\" (UniqueName: \"kubernetes.io/projected/421cb8d9-2bc0-42a2-9c80-7676cecda20b-kube-api-access-sbfz5\") pod \"ingress-operator-5b745b69d9-tfv8z\" (UID: \"421cb8d9-2bc0-42a2-9c80-7676cecda20b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.259923 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9p8cj" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.276476 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mvwn\" (UniqueName: \"kubernetes.io/projected/0c7fb422-f539-48c4-94c3-02602bb323d6-kube-api-access-5mvwn\") pod \"multus-admission-controller-857f4d67dd-g9699\" (UID: \"0c7fb422-f539-48c4-94c3-02602bb323d6\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.282552 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x75z\" (UniqueName: \"kubernetes.io/projected/e263441b-77f1-4f99-b1ba-419525173cf0-kube-api-access-8x75z\") pod \"collect-profiles-29426820-srlnz\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.296819 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt"] Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.297428 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnxr5\" (UniqueName: \"kubernetes.io/projected/822753a6-f2ae-42df-adc6-0cb5113d1385-kube-api-access-qnxr5\") pod \"package-server-manager-789f6589d5-qcmlb\" (UID: \"822753a6-f2ae-42df-adc6-0cb5113d1385\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.346064 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:40 crc kubenswrapper[4733]: E1213 07:06:40.346419 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:40.846404761 +0000 UTC m=+145.457635244 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.359475 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7"] Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.419412 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.428103 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.442200 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.447026 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:40 crc kubenswrapper[4733]: E1213 07:06:40.447400 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:40.947382825 +0000 UTC m=+145.558613308 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.463482 4733 generic.go:334] "Generic (PLEG): container finished" podID="0cb4ca5e-83af-4dd2-aecf-5759e90f9b77" containerID="a1c0c2f5c64c3dfb9ad2cb78310914b05d7c6c5ac6364e877e31bfed08dd9fe7" exitCode=0 Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.463609 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" event={"ID":"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77","Type":"ContainerDied","Data":"a1c0c2f5c64c3dfb9ad2cb78310914b05d7c6c5ac6364e877e31bfed08dd9fe7"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.467329 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.471158 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" event={"ID":"351d859b-8f7e-4adc-9a55-10615cd158fc","Type":"ContainerStarted","Data":"9010d78a7397c989f249449c6ada9b8a37c1b2400e625921cbf362d2304b388a"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.471232 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" event={"ID":"351d859b-8f7e-4adc-9a55-10615cd158fc","Type":"ContainerStarted","Data":"16c516159cac9fdc4a2a22edd15c69f465dc984200ca7bf0949ac36814061565"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.474450 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-r59jx" event={"ID":"02e53589-ca3a-41cc-be79-94b5453bf411","Type":"ContainerStarted","Data":"d93174ecfb9d6901dddf5d93b854d20c334807427a6d16fe45a7e57371c6e412"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.480968 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-2qw2l" event={"ID":"bbe545cc-6362-4ade-b7aa-65e5fa4892e2","Type":"ContainerStarted","Data":"5171efe1cebfc69386edfa9d89d1dc609618e0aad603ed9b00506a4b86e98117"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.481819 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" event={"ID":"686e2567-9406-45dd-a8f5-b1cadd2efda2","Type":"ContainerStarted","Data":"fa9ed3c3ba7ad77cee3ce01ce1c21727ab2baa7ab790847aaf4c513386a12149"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.490105 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-84p49" event={"ID":"a41e126a-492d-4340-bb48-772b1364fd25","Type":"ContainerStarted","Data":"4e36d38df2073f44732e006bdf3dfb7fb965d42f48f568ad4b94996ec634eced"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.490131 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-84p49" event={"ID":"a41e126a-492d-4340-bb48-772b1364fd25","Type":"ContainerStarted","Data":"3c189e12cfcdff12dd0e108098d1946b9e48448f2511897ec4d386ca946bbfd2"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.491832 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" event={"ID":"114a8849-0be4-4fde-ada0-9d8ebf304ede","Type":"ContainerStarted","Data":"2dde638cc971131540d47cd72ed355d42d289b136032d55288469ba8ccb13fbf"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.493013 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.493975 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" event={"ID":"5a860e32-a83a-403e-aed7-a3e703a0b0cb","Type":"ContainerStarted","Data":"4d47bbe48445a0927bb651f403e733b5e1d56041f2a0af81434f9b61b30e9b6e"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.497580 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" event={"ID":"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8","Type":"ContainerStarted","Data":"44f07d7b523d6142f8dba51b0a4737f6193220b2dccfaff60f3d0b55878a9cff"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.497617 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" event={"ID":"d5eb015c-f55a-4ae3-9c01-63c1f5f167b8","Type":"ContainerStarted","Data":"86163f0616e7aed404e0562be76fb0790420a87d6d79cb20ccf13e50e2f55fa8"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.499059 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" event={"ID":"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a","Type":"ContainerStarted","Data":"9192be2a57b35a1a1d0956bdea9c82894dae4ad05c1960714faa71554c216fd9"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.499096 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" event={"ID":"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a","Type":"ContainerStarted","Data":"f56638a3bb8770d674a91c0fe986b7893695837162fcb29fc14ad6881429df6b"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.500621 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" event={"ID":"d8abbbbe-5215-4443-8824-dec6f2a1c89e","Type":"ContainerStarted","Data":"f4317bc38267e11e4f4c16af800456e06040d0fe486ca9b0e22f3470b1c8873e"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.500648 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" event={"ID":"d8abbbbe-5215-4443-8824-dec6f2a1c89e","Type":"ContainerStarted","Data":"4844c428e0a9a57a6ea4dd664c3fe7bc21eb72016b8bcaa4b95276426ee1e541"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.502740 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" event={"ID":"3b51be9d-879f-441e-bb5e-baae8f6c8378","Type":"ContainerStarted","Data":"c5052697ee8b6c9b170a65482b9d822022211047c3fedf67456ba0b4bef25646"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.503833 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" event={"ID":"1412595e-0064-4407-89a2-51b8e97f49d7","Type":"ContainerStarted","Data":"03ee16b119cb0aa1e7e5d4bc2226dd2a1ae9a651f80beaccc9e0a1949350fbf7"} Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.505967 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" event={"ID":"875adea7-a638-4064-a466-2ab4e37771e3","Type":"ContainerStarted","Data":"5a71a52fcb764578da42e4366cf283b9a55a05238724154bbd0fbbd2bf36b9d6"} Dec 13 07:06:40 
crc kubenswrapper[4733]: I1213 07:06:40.525444 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.535506 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.548793 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:40 crc kubenswrapper[4733]: E1213 07:06:40.549186 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.049172643 +0000 UTC m=+145.660403126 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.612855 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q"] Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.650708 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:40 crc kubenswrapper[4733]: E1213 07:06:40.651580 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.151563001 +0000 UTC m=+145.762793484 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.688117 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sxsq6"] Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.688522 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-z4czb"] Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.754818 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv"] Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.755333 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn"] Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.758177 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:40 crc kubenswrapper[4733]: E1213 07:06:40.758788 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.258767483 +0000 UTC m=+145.869997966 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.809793 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln"] Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.859947 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:40 crc kubenswrapper[4733]: E1213 07:06:40.860271 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.360254603 +0000 UTC m=+145.971485086 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:40 crc kubenswrapper[4733]: I1213 07:06:40.962673 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:40 crc kubenswrapper[4733]: E1213 07:06:40.963204 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.463193386 +0000 UTC m=+146.074423869 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.063986 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.064542 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.564519072 +0000 UTC m=+146.175749585 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.136870 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g"] Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.168196 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.168568 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.668555208 +0000 UTC m=+146.279785691 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.190893 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf"] Dec 13 07:06:41 crc kubenswrapper[4733]: W1213 07:06:41.207815 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf276c387_4478_4a04_abb8_6b3db599690a.slice/crio-64b37c21c42336cc56a0c88371345790d346557cd2213112af5b050c8a3a306f WatchSource:0}: Error finding container 64b37c21c42336cc56a0c88371345790d346557cd2213112af5b050c8a3a306f: Status 404 returned error can't find the container with id 64b37c21c42336cc56a0c88371345790d346557cd2213112af5b050c8a3a306f Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.269301 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.269470 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.76944319 +0000 UTC m=+146.380673673 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.269701 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.270098 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.770081628 +0000 UTC m=+146.381312171 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: W1213 07:06:41.317729 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda512f203_a0f8_4b45_b957_b19986851387.slice/crio-e1402eccefd4682e496f4335d89c67f7c188cfa045ba72146d5235fe17d0f39d WatchSource:0}: Error finding container e1402eccefd4682e496f4335d89c67f7c188cfa045ba72146d5235fe17d0f39d: Status 404 returned error can't find the container with id e1402eccefd4682e496f4335d89c67f7c188cfa045ba72146d5235fe17d0f39d Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.372783 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.373015 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.872965861 +0000 UTC m=+146.484196344 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.373198 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.373769 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.873759045 +0000 UTC m=+146.484989528 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.477110 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.477746 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:41.97773082 +0000 UTC m=+146.588961303 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.505572 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-cbq4r"] Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.557979 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-lbkq2"] Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.561029 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj"] Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.582930 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.583356 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:42.083340985 +0000 UTC m=+146.694571468 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.656947 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" event={"ID":"f9453127-1a25-4598-9247-928891567163","Type":"ContainerStarted","Data":"0706b448604696df2b703b6d03a3fc74ba5f55140e11d3ac36598cff64ae4b24"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.684103 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.684511 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:42.184497424 +0000 UTC m=+146.795727907 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.686032 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-sxsq6" event={"ID":"47834947-2c02-4560-8970-c3bedbfd9b40","Type":"ContainerStarted","Data":"e3462e68d7f3b48b453f1c9d3f3e5581fb2d8aa5d1b94cd8ef0841b7f338c89b"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.709958 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-r59jx" event={"ID":"02e53589-ca3a-41cc-be79-94b5453bf411","Type":"ContainerStarted","Data":"7ae14e5d5bc5074711bbe6bb993210157ced36506e8d2b4f3c803f43a360bed5"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.713993 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-r59jx" Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.723739 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" event={"ID":"e546cb09-56f5-4f26-aa9f-fb0cdbedcffe","Type":"ContainerStarted","Data":"2e23b9ad0caaeccb1c1a3f68070d0c306c2fdcdd73a32b8c06cf097fc2ce94f3"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.726623 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.726666 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.727692 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z4czb" event={"ID":"209b5640-5db3-46dc-8dd1-1a302a385526","Type":"ContainerStarted","Data":"d9ed28d0f6a8010972c003efc3cf07ba27d7fe0643c0df411bc7f703c3c41e45"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.764021 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" event={"ID":"114a8849-0be4-4fde-ada0-9d8ebf304ede","Type":"ContainerStarted","Data":"0a07c9cd380b7e29842ac065455cbda8b8e320403adf4d47aa6d24d807260864"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.772802 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" event={"ID":"a512f203-a0f8-4b45-b957-b19986851387","Type":"ContainerStarted","Data":"e1402eccefd4682e496f4335d89c67f7c188cfa045ba72146d5235fe17d0f39d"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.776785 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" 
event={"ID":"875adea7-a638-4064-a466-2ab4e37771e3","Type":"ContainerStarted","Data":"3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.781300 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.785912 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.785958 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.785983 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.787865 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.788194 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:42.28817985 +0000 UTC m=+146.899410333 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.800389 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-2qw2l" event={"ID":"bbe545cc-6362-4ade-b7aa-65e5fa4892e2","Type":"ContainerStarted","Data":"619d4d9fec391a8c1d3de68041ca987e1f54eb5758db4fffa62cbe826773cb18"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.819468 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck"] Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.846911 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.874390 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" event={"ID":"1412595e-0064-4407-89a2-51b8e97f49d7","Type":"ContainerStarted","Data":"c4f0b64727932ee5127c02023c17e62720d96b8f092d8340145ad89e14ffc18d"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.886941 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.892519 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.892644 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.893333 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:42.393282439 +0000 UTC m=+147.004512922 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.893987 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" event={"ID":"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c","Type":"ContainerStarted","Data":"d60b2c984e23d420c1c48948eb7c0ab2fff941864827d2bf25626acae64639e9"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.903130 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-vf8wt" podStartSLOduration=121.903103827 podStartE2EDuration="2m1.903103827s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:41.897767266 +0000 UTC m=+146.508997759" watchObservedRunningTime="2025-12-13 07:06:41.903103827 +0000 UTC m=+146.514334310" Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.939002 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.942035 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.963462 4733 generic.go:334] "Generic (PLEG): container finished" podID="5a860e32-a83a-403e-aed7-a3e703a0b0cb" containerID="90625a857594acabfcf0f0bb6fa43fb010558df42208e9d16fa12ca2d69511fa" exitCode=0 Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.963577 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" event={"ID":"5a860e32-a83a-403e-aed7-a3e703a0b0cb","Type":"ContainerDied","Data":"90625a857594acabfcf0f0bb6fa43fb010558df42208e9d16fa12ca2d69511fa"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.983530 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-l9qj7" event={"ID":"f276c387-4478-4a04-abb8-6b3db599690a","Type":"ContainerStarted","Data":"64b37c21c42336cc56a0c88371345790d346557cd2213112af5b050c8a3a306f"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.984440 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf" event={"ID":"146d0764-767d-402d-9538-64d5fee9d6c9","Type":"ContainerStarted","Data":"eebe3506b8af156fa95dc43df97e67bf4891dd5798d13e9401e9adb19bf34f47"} Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.994940 
4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:41 crc kubenswrapper[4733]: E1213 07:06:41.998344 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:42.498327687 +0000 UTC m=+147.109558260 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:41 crc kubenswrapper[4733]: I1213 07:06:41.998458 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" event={"ID":"b17e913c-22ff-4166-bad2-fc4b1f9c6155","Type":"ContainerStarted","Data":"8cdf770903d30dbf9f8df156ed37bfa8a2068200bac2895cb850316a9fddea09"} Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.005632 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-2qw2l" podStartSLOduration=122.005598188 podStartE2EDuration="2m2.005598188s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:41.999151052 +0000 UTC m=+146.610381555" watchObservedRunningTime="2025-12-13 07:06:42.005598188 +0000 UTC m=+146.616828681" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.047548 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" event={"ID":"e03d4bbe-6cd7-4c00-8238-e036fdb68171","Type":"ContainerStarted","Data":"8f94e78f1c756ee5178163c6156ff18967ea8cdf864dde96e22255f31af30455"} Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.048548 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.050233 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.072155 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-r59jx" podStartSLOduration=122.072134557 podStartE2EDuration="2m2.072134557s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:42.069688972 +0000 UTC m=+146.680919455" watchObservedRunningTime="2025-12-13 07:06:42.072134557 +0000 UTC m=+146.683365050" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.096005 
4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:42 crc kubenswrapper[4733]: E1213 07:06:42.097448 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:42.597429554 +0000 UTC m=+147.208660037 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.110531 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.126763 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.127541 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.158839 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.186343 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" podStartSLOduration=122.186325162 podStartE2EDuration="2m2.186325162s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:42.185465896 +0000 UTC m=+146.796696379" watchObservedRunningTime="2025-12-13 07:06:42.186325162 +0000 UTC m=+146.797555635" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.197069 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.197802 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:42 crc kubenswrapper[4733]: E1213 07:06:42.199938 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-13 07:06:42.699829112 +0000 UTC m=+147.311059585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.237155 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-n66g9" podStartSLOduration=122.237135924 podStartE2EDuration="2m2.237135924s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:42.235796733 +0000 UTC m=+146.847027216" watchObservedRunningTime="2025-12-13 07:06:42.237135924 +0000 UTC m=+146.848366407" Dec 13 07:06:42 crc kubenswrapper[4733]: W1213 07:06:42.281718 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f40f667_1ba4_4725_ae8b_326082ca9cba.slice/crio-bb8246153c30bdcdc782be6374b3db583e91dce9b77b5861d93414d71efacdeb WatchSource:0}: Error finding container bb8246153c30bdcdc782be6374b3db583e91dce9b77b5861d93414d71efacdeb: Status 404 returned error can't find the container with id bb8246153c30bdcdc782be6374b3db583e91dce9b77b5861d93414d71efacdeb Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.299395 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:42 crc kubenswrapper[4733]: E1213 07:06:42.300982 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:42.800954761 +0000 UTC m=+147.412185244 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.301956 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:42 crc kubenswrapper[4733]: E1213 07:06:42.302675 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:42.802660053 +0000 UTC m=+147.413890536 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.337017 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-jsklp" podStartSLOduration=122.336991564 podStartE2EDuration="2m2.336991564s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:42.318644377 +0000 UTC m=+146.929874870" watchObservedRunningTime="2025-12-13 07:06:42.336991564 +0000 UTC m=+146.948222047" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.337647 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" podStartSLOduration=121.337642244 podStartE2EDuration="2m1.337642244s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:42.273507458 +0000 UTC m=+146.884737941" watchObservedRunningTime="2025-12-13 07:06:42.337642244 +0000 UTC m=+146.948872727" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.387841 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" podStartSLOduration=122.387823167 podStartE2EDuration="2m2.387823167s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:42.34971196 +0000 UTC m=+146.960942453" watchObservedRunningTime="2025-12-13 07:06:42.387823167 +0000 UTC m=+146.999053650" Dec 13 07:06:42 crc 
kubenswrapper[4733]: I1213 07:06:42.388912 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jlp6n" podStartSLOduration=122.388905919 podStartE2EDuration="2m2.388905919s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:42.387725004 +0000 UTC m=+146.998955497" watchObservedRunningTime="2025-12-13 07:06:42.388905919 +0000 UTC m=+147.000136402" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.405937 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:42 crc kubenswrapper[4733]: E1213 07:06:42.406319 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:42.906299498 +0000 UTC m=+147.517529981 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.427555 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-84p49" podStartSLOduration=121.427532972 podStartE2EDuration="2m1.427532972s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:42.425422667 +0000 UTC m=+147.036653160" watchObservedRunningTime="2025-12-13 07:06:42.427532972 +0000 UTC m=+147.038763455" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.481439 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:42 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:42 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:42 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.481491 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.509228 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:42 crc kubenswrapper[4733]: E1213 07:06:42.509514 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:43.009503719 +0000 UTC m=+147.620734202 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.528674 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" podStartSLOduration=122.52865848 podStartE2EDuration="2m2.52865848s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:42.528415943 +0000 UTC m=+147.139646436" watchObservedRunningTime="2025-12-13 07:06:42.52865848 +0000 UTC m=+147.139888963" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.609820 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:42 crc kubenswrapper[4733]: E1213 07:06:42.610059 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:43.110038409 +0000 UTC m=+147.721268892 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.610603 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:42 crc kubenswrapper[4733]: E1213 07:06:42.612196 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-13 07:06:43.112187605 +0000 UTC m=+147.723418088 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.638064 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.698470 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4949m" Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.715523 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:42 crc kubenswrapper[4733]: E1213 07:06:42.716082 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:43.216069157 +0000 UTC m=+147.827299640 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.817033 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:42 crc kubenswrapper[4733]: E1213 07:06:42.817404 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:43.317392321 +0000 UTC m=+147.928622804 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.878707 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w"] Dec 13 07:06:42 crc kubenswrapper[4733]: I1213 07:06:42.925730 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:42 crc kubenswrapper[4733]: E1213 07:06:42.926785 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:43.426764751 +0000 UTC m=+148.037995234 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.065971 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:43 crc kubenswrapper[4733]: E1213 07:06:43.066348 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:43.566333906 +0000 UTC m=+148.177564389 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.136469 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-g9699"] Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.145768 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds"] Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.151441 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9p8cj"] Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.166947 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:43 crc kubenswrapper[4733]: E1213 07:06:43.167280 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:43.667257038 +0000 UTC m=+148.278487521 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.224442 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" event={"ID":"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77","Type":"ContainerStarted","Data":"0768831c62916ba199321688a4c53772159ee72077a1ad5f2bd51dfee27bb280"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.229434 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-z8h96"] Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.235008 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" event={"ID":"544cc374-976e-49c9-b1b6-2ff1df7e133d","Type":"ContainerStarted","Data":"760832f6f2a1fe4c87ca7ff8db4c2eefe8a0d22ce6ed0e8d30de9ba06ba5c2da"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.236450 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf" event={"ID":"146d0764-767d-402d-9538-64d5fee9d6c9","Type":"ContainerStarted","Data":"0d1b614b6acfe089c5a7fe8f10ecb216adb19232d983102f7caa3f9906d8bc46"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.237859 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6f4n7" event={"ID":"4a6b1d88-bbba-400f-94b5-9e3bc73c7c0c","Type":"ContainerStarted","Data":"b9047f85aba17675b13cfd4c92e798eb4333e563d48f119dd16875352b68ab69"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.249397 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" event={"ID":"7fe8bce0-c7ce-454a-956f-f8c0fe428c02","Type":"ContainerStarted","Data":"46b93c59cb5ba9adc80dee9e994ba4649e2dcc9d2cebbf3bb56f31dfb5c58af3"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.250742 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" event={"ID":"78af3cea-e4f1-4604-95f7-07db771f23b6","Type":"ContainerStarted","Data":"8b4c93dabf3969e780a5960c37158472b508b2d6d765f8c409096a0aa60fbffe"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.250766 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" event={"ID":"78af3cea-e4f1-4604-95f7-07db771f23b6","Type":"ContainerStarted","Data":"cee7a198df41e49c340a95db91b9bbd8f72f4d5ed39764e0dc6dc2e4496319a1"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.252998 4733 generic.go:334] "Generic (PLEG): container finished" podID="686e2567-9406-45dd-a8f5-b1cadd2efda2" containerID="b93e395a5f876dc424e13295872973673e5ea51335fc51d3d1cc02e9c9dd04c1" exitCode=0 Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.253041 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" 
event={"ID":"686e2567-9406-45dd-a8f5-b1cadd2efda2","Type":"ContainerDied","Data":"b93e395a5f876dc424e13295872973673e5ea51335fc51d3d1cc02e9c9dd04c1"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.268741 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:43 crc kubenswrapper[4733]: E1213 07:06:43.275303 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:43.775286856 +0000 UTC m=+148.386517339 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.292081 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb"] Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.308531 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" event={"ID":"b17e913c-22ff-4166-bad2-fc4b1f9c6155","Type":"ContainerStarted","Data":"702da900b132fc2037804eae62b99bda2c6a39b54e0a9a9765e848b5a68e3fbc"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.308916 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.334382 4733 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-4frbn container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.334453 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" podUID="b17e913c-22ff-4166-bad2-fc4b1f9c6155" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.335162 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" podStartSLOduration=122.335143662 podStartE2EDuration="2m2.335143662s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:43.334777942 +0000 UTC m=+147.946008425" watchObservedRunningTime="2025-12-13 
07:06:43.335143662 +0000 UTC m=+147.946374145" Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.350344 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" event={"ID":"4f40f667-1ba4-4725-ae8b-326082ca9cba","Type":"ContainerStarted","Data":"bb8246153c30bdcdc782be6374b3db583e91dce9b77b5861d93414d71efacdeb"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.372693 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:43 crc kubenswrapper[4733]: E1213 07:06:43.373519 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:43.873490437 +0000 UTC m=+148.484720980 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.373715 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:43 crc kubenswrapper[4733]: E1213 07:06:43.374752 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:43.874744925 +0000 UTC m=+148.485975408 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.403380 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:43 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:43 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:43 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.403450 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.411677 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" event={"ID":"81e6cded-065e-4ed8-a97c-a55e128a4aaf","Type":"ContainerStarted","Data":"6e08e253baa2291b39249c8376fec95cd33bb4c81f4ca1c92833b88c24e04575"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.474967 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:43 crc kubenswrapper[4733]: E1213 07:06:43.476567 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:43.976542083 +0000 UTC m=+148.587772566 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.518924 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" event={"ID":"e546cb09-56f5-4f26-aa9f-fb0cdbedcffe","Type":"ContainerStarted","Data":"665c0fbeff923229619adfbe69281591878b95091e95e9a527628c957f535de0"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.576679 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-z4czb" event={"ID":"209b5640-5db3-46dc-8dd1-1a302a385526","Type":"ContainerStarted","Data":"0e3d2bf683c7195135c173be1c31977b568a74fb6cfd6ed2152905d28ab49e19"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.582076 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:43 crc kubenswrapper[4733]: E1213 07:06:43.582623 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:44.082582051 +0000 UTC m=+148.693812544 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.592570 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" event={"ID":"d8abbbbe-5215-4443-8824-dec6f2a1c89e","Type":"ContainerStarted","Data":"05db3733a116adb7ca1cd86f1fb057244c35fcf38c7ed3507e6181fc15f4b6d2"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.647263 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-sxsq6" event={"ID":"47834947-2c02-4560-8970-c3bedbfd9b40","Type":"ContainerStarted","Data":"d25f467c78a5b183e671cfc5ef0ddbfdbad5408c9e53c3986125067412649e5d"} Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.647420 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.647450 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.648902 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.683394 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:43 crc kubenswrapper[4733]: E1213 07:06:43.685104 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:44.18504368 +0000 UTC m=+148.796274203 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.688818 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-z4czb" podStartSLOduration=123.688801874 podStartE2EDuration="2m3.688801874s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:43.657483424 +0000 UTC m=+148.268713907" watchObservedRunningTime="2025-12-13 07:06:43.688801874 +0000 UTC m=+148.300032357" Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.691456 4733 patch_prober.go:28] interesting pod/console-operator-58897d9998-sxsq6 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/readyz\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.691514 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-sxsq6" podUID="47834947-2c02-4560-8970-c3bedbfd9b40" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.14:8443/readyz\": dial tcp 10.217.0.14:8443: connect: connection refused" Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.721259 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-l9qj7" podStartSLOduration=7.721243779 podStartE2EDuration="7.721243779s" podCreationTimestamp="2025-12-13 07:06:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:43.694765825 +0000 UTC m=+148.305996308" watchObservedRunningTime="2025-12-13 07:06:43.721243779 +0000 UTC m=+148.332474262" Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.722342 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-sxsq6" podStartSLOduration=123.722335892 podStartE2EDuration="2m3.722335892s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:43.720714883 +0000 UTC m=+148.331945366" watchObservedRunningTime="2025-12-13 07:06:43.722335892 +0000 UTC m=+148.333566375" Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.789821 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:43 crc kubenswrapper[4733]: E1213 07:06:43.814907 4733 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:44.31488031 +0000 UTC m=+148.926110793 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.831566 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z69hg" podStartSLOduration=122.831548506 podStartE2EDuration="2m2.831548506s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:43.768118301 +0000 UTC m=+148.379348784" watchObservedRunningTime="2025-12-13 07:06:43.831548506 +0000 UTC m=+148.442778979" Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.866902 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz"] Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.894498 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:43 crc kubenswrapper[4733]: E1213 07:06:43.895019 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:44.395001231 +0000 UTC m=+149.006231714 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:43 crc kubenswrapper[4733]: I1213 07:06:43.995847 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:43 crc kubenswrapper[4733]: E1213 07:06:43.996201 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-13 07:06:44.496162631 +0000 UTC m=+149.107393114 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.097205 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:44 crc kubenswrapper[4733]: E1213 07:06:44.098412 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:44.598352232 +0000 UTC m=+149.209582715 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.164829 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z"] Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.200309 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:44 crc kubenswrapper[4733]: W1213 07:06:44.200636 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod421cb8d9_2bc0_42a2_9c80_7676cecda20b.slice/crio-b6847feaa2d4d4e0e435b940238f1218fa475530261fed8822778144ec09f439 WatchSource:0}: Error finding container b6847feaa2d4d4e0e435b940238f1218fa475530261fed8822778144ec09f439: Status 404 returned error can't find the container with id b6847feaa2d4d4e0e435b940238f1218fa475530261fed8822778144ec09f439 Dec 13 07:06:44 crc kubenswrapper[4733]: E1213 07:06:44.200651 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:44.700638926 +0000 UTC m=+149.311869399 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.216523 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:44 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:44 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:44 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.216585 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.301489 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:44 crc kubenswrapper[4733]: E1213 07:06:44.302006 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:44.801987002 +0000 UTC m=+149.413217495 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.320531 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8"] Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.379656 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-nlspd"] Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.403078 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:44 crc kubenswrapper[4733]: E1213 07:06:44.403393 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:44.903378698 +0000 UTC m=+149.514609181 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.441462 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-975kj"] Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.507769 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:44 crc kubenswrapper[4733]: E1213 07:06:44.508044 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:45.008024534 +0000 UTC m=+149.619255017 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:44 crc kubenswrapper[4733]: W1213 07:06:44.516071 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4272f28_ff3e_4543_95fc_322a430ef74a.slice/crio-9f910e5f6f408eea505c903456b7da29b4b463071f0c104344f0c65a1f8af981 WatchSource:0}: Error finding container 9f910e5f6f408eea505c903456b7da29b4b463071f0c104344f0c65a1f8af981: Status 404 returned error can't find the container with id 9f910e5f6f408eea505c903456b7da29b4b463071f0c104344f0c65a1f8af981 Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.588462 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8sw6n"] Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.588503 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5dwn"] Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.612654 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:44 crc kubenswrapper[4733]: E1213 07:06:44.613045 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:45.11303084 +0000 UTC m=+149.724261323 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.716967 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:44 crc kubenswrapper[4733]: E1213 07:06:44.717174 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:45.217146889 +0000 UTC m=+149.828377372 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.717315 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:44 crc kubenswrapper[4733]: E1213 07:06:44.717775 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:45.217760018 +0000 UTC m=+149.828990511 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.765884 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf" event={"ID":"146d0764-767d-402d-9538-64d5fee9d6c9","Type":"ContainerStarted","Data":"41f5393aab50d022bb2f01d3fec46031779b1c4c2ad848c26a68842f7233708b"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.807300 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pjqbf" podStartSLOduration=123.807284515 podStartE2EDuration="2m3.807284515s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:44.806345566 +0000 UTC m=+149.417576059" watchObservedRunningTime="2025-12-13 07:06:44.807284515 +0000 UTC m=+149.418514998" Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.821843 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:44 crc kubenswrapper[4733]: E1213 07:06:44.822900 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:45.322885168 +0000 UTC m=+149.934115651 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.829882 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" event={"ID":"7fe8bce0-c7ce-454a-956f-f8c0fe428c02","Type":"ContainerStarted","Data":"982b3c73d8872251b71b7e5b77b28b8e3558f3186e004ca9b2658dc34d5098b6"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.854863 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" event={"ID":"a512f203-a0f8-4b45-b957-b19986851387","Type":"ContainerStarted","Data":"d6a5f50408e07dbc8177ec1fdfc269670e76e1dee4535bfa7a4ca08132e6c03e"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.867896 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" event={"ID":"f9453127-1a25-4598-9247-928891567163","Type":"ContainerStarted","Data":"333acbacd96e2a32238f7605f30b45e95a521d86d0a6d3e684270e694d284c1a"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.886444 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" event={"ID":"5a860e32-a83a-403e-aed7-a3e703a0b0cb","Type":"ContainerStarted","Data":"043a0e57f1d714de06a603c428508ce908cdf013c65440ebdcc21f4d22f2b226"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.899090 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5h24w" podStartSLOduration=123.8990755 podStartE2EDuration="2m3.8990755s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:44.898483112 +0000 UTC m=+149.509713595" watchObservedRunningTime="2025-12-13 07:06:44.8990755 +0000 UTC m=+149.510305983" Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.918405 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9p8cj" event={"ID":"1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1","Type":"ContainerStarted","Data":"2134005d9a48478cde8e93b119f86ea67848e3fcf86ad33358e3df4b5a155ed8"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.918465 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9p8cj" event={"ID":"1bf8c6e2-25a1-4bbb-a7c4-e2a5edc0d8f1","Type":"ContainerStarted","Data":"5983afdfd7f43dbf5acb7dd512dcb7c6edcfa315065885698053268b04d07936"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.929018 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:44 crc kubenswrapper[4733]: E1213 07:06:44.935235 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:45.435199926 +0000 UTC m=+150.046430409 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.948136 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" event={"ID":"bd887374-af2e-4eb9-9284-442a533e9d29","Type":"ContainerStarted","Data":"9fd38421fbb84f46383bd614b9e28da367f6d1c7274cc1853ec93ff734942fa6"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.948199 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" event={"ID":"bd887374-af2e-4eb9-9284-442a533e9d29","Type":"ContainerStarted","Data":"e71323693236c28bcc152c70eca645cf5638120d21418387c50d7a94d85b1dd4"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.967324 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" event={"ID":"ae37ee7f-716a-49c6-938c-30da6af9ba92","Type":"ContainerStarted","Data":"6e6626cb777410c67e96b5f337a2abca666334ea067a154e360d0b7c6bb58b1f"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.971476 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" event={"ID":"544cc374-976e-49c9-b1b6-2ff1df7e133d","Type":"ContainerStarted","Data":"b6a1f1ec1c777ccaa4a5ea035ea1f76c7e0b47ef5984ee84e1a6a340b0fa5ac7"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.973692 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" event={"ID":"e263441b-77f1-4f99-b1ba-419525173cf0","Type":"ContainerStarted","Data":"e57eade9bcfeaaaaf2dbf15cd0e9c550ed82636cb8aa3195ac9e4bdcc46f4e9c"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.982198 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" podStartSLOduration=123.982183203 podStartE2EDuration="2m3.982183203s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:44.945807659 +0000 UTC m=+149.557038142" watchObservedRunningTime="2025-12-13 07:06:44.982183203 +0000 UTC m=+149.593413686" Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.982264 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" 
event={"ID":"686e2567-9406-45dd-a8f5-b1cadd2efda2","Type":"ContainerStarted","Data":"fe96c63b1ca6deb8d1b5dd539eb11511523417dc25ff47231fa45c8c6d2466ea"} Dec 13 07:06:44 crc kubenswrapper[4733]: I1213 07:06:44.983594 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.080868 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" event={"ID":"e546cb09-56f5-4f26-aa9f-fb0cdbedcffe","Type":"ContainerStarted","Data":"002b21c0ac7e104f60d158defb2d963c387e4bb899bf0a6c4f46a754133d2499"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.081611 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:45 crc kubenswrapper[4733]: E1213 07:06:45.082421 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:45.582397853 +0000 UTC m=+150.193628336 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.101516 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-z8h96" event={"ID":"5719f6d0-51b0-42ac-8db3-c32bb2d95e19","Type":"ContainerStarted","Data":"38177a9d15560af3a63dd4b5f8922031bd309aa0ca41d5bb22cb4fc3d77d1efe"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.103052 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-28mln" podStartSLOduration=125.103034439 podStartE2EDuration="2m5.103034439s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:45.101809502 +0000 UTC m=+149.713039985" watchObservedRunningTime="2025-12-13 07:06:45.103034439 +0000 UTC m=+149.714264922" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.134538 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" event={"ID":"81e6cded-065e-4ed8-a97c-a55e128a4aaf","Type":"ContainerStarted","Data":"adb87729ca70d430512619cbcec12e7d4a0b34163aefdb8b85fc85f45afc341b"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.135625 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.166968 4733 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" event={"ID":"e4272f28-ff3e-4543-95fc-322a430ef74a","Type":"ContainerStarted","Data":"9f910e5f6f408eea505c903456b7da29b4b463071f0c104344f0c65a1f8af981"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.183631 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8" event={"ID":"8b71bebb-d9b6-4d26-8a46-eb83bb713af5","Type":"ContainerStarted","Data":"337130ffa549b13a14bbf5d2e98a1085c49dc9fd6fe78a65462b20557bafaa5d"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.183790 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.183945 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:45 crc kubenswrapper[4733]: E1213 07:06:45.185252 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:45.685234354 +0000 UTC m=+150.296464837 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.209375 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:45 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:45 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:45 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.209425 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.215611 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" podStartSLOduration=125.215597965 podStartE2EDuration="2m5.215597965s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:45.213556483 +0000 UTC m=+149.824786966" watchObservedRunningTime="2025-12-13 07:06:45.215597965 +0000 UTC m=+149.826828448" Dec 13 07:06:45 crc kubenswrapper[4733]: 
I1213 07:06:45.216891 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" event={"ID":"e03d4bbe-6cd7-4c00-8238-e036fdb68171","Type":"ContainerStarted","Data":"4f0a2a366ca2e6cb59476b1d6713c84b4dbd92dac92d9b318268a65881779622"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.234088 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5dx2g" podStartSLOduration=125.234063025 podStartE2EDuration="2m5.234063025s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:45.179698166 +0000 UTC m=+149.790928649" watchObservedRunningTime="2025-12-13 07:06:45.234063025 +0000 UTC m=+149.845293508" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.280358 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" event={"ID":"4f40f667-1ba4-4725-ae8b-326082ca9cba","Type":"ContainerStarted","Data":"3540377daf0a0087ca143970a2a5a736b0353b989b16d42c5f4435c2db71150c"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.281953 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.291328 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:45 crc kubenswrapper[4733]: E1213 07:06:45.293094 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:45.793069046 +0000 UTC m=+150.404299529 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.322936 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-9p8cj" podStartSLOduration=9.322915091 podStartE2EDuration="9.322915091s" podCreationTimestamp="2025-12-13 07:06:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:45.321706715 +0000 UTC m=+149.932937218" watchObservedRunningTime="2025-12-13 07:06:45.322915091 +0000 UTC m=+149.934145574" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.337654 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" event={"ID":"78af3cea-e4f1-4604-95f7-07db771f23b6","Type":"ContainerStarted","Data":"be0dcf8b2b166b2ecd8b19bcb1b8c753c52cd713b27468948f18071b54b04f1f"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.365562 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.366423 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-l9qj7" event={"ID":"f276c387-4478-4a04-abb8-6b3db599690a","Type":"ContainerStarted","Data":"54142cf9159bf9ede49f794088c74d7a872fd58c462e161cf561cd45de243be0"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.397526 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:45 crc kubenswrapper[4733]: E1213 07:06:45.399475 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:45.899463755 +0000 UTC m=+150.510694238 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.410716 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" event={"ID":"0c7fb422-f539-48c4-94c3-02602bb323d6","Type":"ContainerStarted","Data":"e671ceaaac67a0cc81cfa3904bd74331694caf6693f7e753714d459626dcb4ec"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.435361 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" podStartSLOduration=125.435342743 podStartE2EDuration="2m5.435342743s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:45.434010322 +0000 UTC m=+150.045240815" watchObservedRunningTime="2025-12-13 07:06:45.435342743 +0000 UTC m=+150.046573226" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.435733 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mq99q" podStartSLOduration=125.435727124 podStartE2EDuration="2m5.435727124s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:45.387342257 +0000 UTC m=+149.998572740" watchObservedRunningTime="2025-12-13 07:06:45.435727124 +0000 UTC m=+150.046957607" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.443549 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" event={"ID":"822753a6-f2ae-42df-adc6-0cb5113d1385","Type":"ContainerStarted","Data":"81b7fdd3ea03269b95c1e080d56524cdda748385f2bb4d448ba8c269a58c0f62"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.443591 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" event={"ID":"822753a6-f2ae-42df-adc6-0cb5113d1385","Type":"ContainerStarted","Data":"5756ab6deb41b347b9811056c449f41e5f75e3419727f6969462f52a59c72a55"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.444332 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.489334 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" event={"ID":"421cb8d9-2bc0-42a2-9c80-7676cecda20b","Type":"ContainerStarted","Data":"b6847feaa2d4d4e0e435b940238f1218fa475530261fed8822778144ec09f439"} Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.502736 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:45 crc kubenswrapper[4733]: E1213 07:06:45.506863 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:46.006838342 +0000 UTC m=+150.618068825 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.514588 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:45 crc kubenswrapper[4733]: E1213 07:06:45.516449 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:46.016437094 +0000 UTC m=+150.627667577 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.524317 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.616721 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:45 crc kubenswrapper[4733]: E1213 07:06:45.617666 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:46.117651554 +0000 UTC m=+150.728882037 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.621786 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-cbq4r" podStartSLOduration=125.621765579 podStartE2EDuration="2m5.621765579s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:45.502319935 +0000 UTC m=+150.113550448" watchObservedRunningTime="2025-12-13 07:06:45.621765579 +0000 UTC m=+150.232996082" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.768759 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:45 crc kubenswrapper[4733]: E1213 07:06:45.769474 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:46.269458582 +0000 UTC m=+150.880689065 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.781547 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-ztngj" podStartSLOduration=124.781527407 podStartE2EDuration="2m4.781527407s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:45.656321188 +0000 UTC m=+150.267551671" watchObservedRunningTime="2025-12-13 07:06:45.781527407 +0000 UTC m=+150.392757890" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.782514 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-cl5ds" podStartSLOduration=125.782507618 podStartE2EDuration="2m5.782507618s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:45.78026732 +0000 UTC m=+150.391497823" watchObservedRunningTime="2025-12-13 07:06:45.782507618 +0000 UTC m=+150.393738101" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.862840 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-sxsq6" Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.871717 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:45 crc kubenswrapper[4733]: E1213 07:06:45.872289 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:46.372270181 +0000 UTC m=+150.983500664 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:45 crc kubenswrapper[4733]: I1213 07:06:45.973028 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:45 crc kubenswrapper[4733]: E1213 07:06:45.973416 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:46.47339881 +0000 UTC m=+151.084629293 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.073735 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:46 crc kubenswrapper[4733]: E1213 07:06:46.074182 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:46.574164627 +0000 UTC m=+151.185395120 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.159623 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" podStartSLOduration=125.15960666 podStartE2EDuration="2m5.15960666s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:46.157570059 +0000 UTC m=+150.768800542" watchObservedRunningTime="2025-12-13 07:06:46.15960666 +0000 UTC m=+150.770837143" Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.176667 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:46 crc kubenswrapper[4733]: E1213 07:06:46.176993 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:46.676978948 +0000 UTC m=+151.288209431 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.197292 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:46 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:46 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:46 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.197352 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.284100 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:46 crc kubenswrapper[4733]: E1213 07:06:46.284811 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:46.784795079 +0000 UTC m=+151.396025572 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.306041 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zn4kv" podStartSLOduration=126.306015583 podStartE2EDuration="2m6.306015583s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:46.229931495 +0000 UTC m=+150.841161978" watchObservedRunningTime="2025-12-13 07:06:46.306015583 +0000 UTC m=+150.917246066" Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.385690 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:46 crc kubenswrapper[4733]: E1213 07:06:46.386066 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:46.886052132 +0000 UTC m=+151.497282615 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.489747 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:46 crc kubenswrapper[4733]: E1213 07:06:46.490280 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:46.990260364 +0000 UTC m=+151.601490847 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.556965 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8" event={"ID":"8b71bebb-d9b6-4d26-8a46-eb83bb713af5","Type":"ContainerStarted","Data":"dc0842f8ea6e06f40aebe39b37cb4510c725302163a6136204c03da84153a22c"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.573656 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" event={"ID":"421cb8d9-2bc0-42a2-9c80-7676cecda20b","Type":"ContainerStarted","Data":"c503b5d944b8889f39da1dca1912e9d5b2162b24ee69ce2428fa89361490a4fb"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.573706 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" event={"ID":"421cb8d9-2bc0-42a2-9c80-7676cecda20b","Type":"ContainerStarted","Data":"82cecfee5de272a08168aa9ced80a489a955b47189d009b07fb31b427887de93"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.595561 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-z8h96" event={"ID":"5719f6d0-51b0-42ac-8db3-c32bb2d95e19","Type":"ContainerStarted","Data":"3b511b3eb15493c55c57905a764f9851a767d8941547fdc06d84d2340cba559e"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.595624 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-z8h96" event={"ID":"5719f6d0-51b0-42ac-8db3-c32bb2d95e19","Type":"ContainerStarted","Data":"37e2c848e8461b5d9276c49492ce91dec6252d0e24a72d8ed422e5372322fd86"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.596042 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:46 crc kubenswrapper[4733]: E1213 07:06:46.596499 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:47.096484148 +0000 UTC m=+151.707714631 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.624524 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"0bfb6f1578e906fb9dea903334938916db0f031a0834a65df2c72e67fdb48a89"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.624584 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"aaae6dbe8c5b4d492fe3c2a8b86c2fe6eab5166aafa5f2f36aa4a0e293e502b9"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.651959 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"b027e500f65e57e104771b99205814ed1b3b97624b621bddcaac0c19437e5f51"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.652010 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c2be0c477cbe388f3cd51e52f6f089e32c91920f210aa5ed8a730fdcc88e60cd"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.691241 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" event={"ID":"e263441b-77f1-4f99-b1ba-419525173cf0","Type":"ContainerStarted","Data":"4dd4ceb24837bd449c425fe3d677d270c085405258ec1022b6a044ba6e60fa3e"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.698664 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:46 crc kubenswrapper[4733]: E1213 07:06:46.699805 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:47.199787842 +0000 UTC m=+151.811018335 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.716982 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" event={"ID":"ae37ee7f-716a-49c6-938c-30da6af9ba92","Type":"ContainerStarted","Data":"4dc16949cc6ce9f84a0f72e1889e5177123c12020dd815b6be90622c6ba69f46"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.753040 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" event={"ID":"822753a6-f2ae-42df-adc6-0cb5113d1385","Type":"ContainerStarted","Data":"1a91e55b7738782b85352298b074cb44d3f678e26bb3322b2e82877ce120ff16"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.761383 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" event={"ID":"404840cf-dd63-4b5f-bde2-49af2c7ace66","Type":"ContainerStarted","Data":"b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.761413 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" event={"ID":"404840cf-dd63-4b5f-bde2-49af2c7ace66","Type":"ContainerStarted","Data":"5f32a44cb36dde1a8670357adaf36b7ed5fd680e2cdd6f09f6a4b237dd56b8f4"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.762083 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.778106 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3e58b485a15cf0bf8e77cb38ed4b3e11474e1e62e26b57ab0b5f81d0489159cc"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.778151 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"598db581004116486e164b5a7a4a74004103bf28d5c7fe6594d57f3002cd101a"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.785982 4733 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-k5dwn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.786034 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" podUID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.806902 4733 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:46 crc kubenswrapper[4733]: E1213 07:06:46.808597 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:47.308584063 +0000 UTC m=+151.919814546 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.808890 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" event={"ID":"0cb4ca5e-83af-4dd2-aecf-5759e90f9b77","Type":"ContainerStarted","Data":"7ca861881955fc0ac0fe591163a41da5a023daec4cf7c2b9846059659701046e"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.841918 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" event={"ID":"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995","Type":"ContainerStarted","Data":"3645dd38d7b83a51c56809ffba74ebe78f7897912867abaed910e6bde528d675"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.843744 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" event={"ID":"0c7fb422-f539-48c4-94c3-02602bb323d6","Type":"ContainerStarted","Data":"c3c2c421e65f095f40f2ee6d18e4542b0c00e464cfd1f2d7b8b120cda3fa10da"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.845546 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" event={"ID":"e4272f28-ff3e-4543-95fc-322a430ef74a","Type":"ContainerStarted","Data":"1a1fe01a80f28d2625007c6384d5859ebbe35d9be632045c1e6a372fe8721fdb"} Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.907464 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:46 crc kubenswrapper[4733]: E1213 07:06:46.907662 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:47.407636299 +0000 UTC m=+152.018866782 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:46 crc kubenswrapper[4733]: I1213 07:06:46.908138 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:46 crc kubenswrapper[4733]: E1213 07:06:46.909453 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:47.409438414 +0000 UTC m=+152.020668967 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.009368 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:47 crc kubenswrapper[4733]: E1213 07:06:47.010262 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:47.510242723 +0000 UTC m=+152.121473206 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.057568 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l66ck" podStartSLOduration=126.057550478 podStartE2EDuration="2m6.057550478s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:47.056108785 +0000 UTC m=+151.667339268" watchObservedRunningTime="2025-12-13 07:06:47.057550478 +0000 UTC m=+151.668780961" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.058296 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-lbkq2" podStartSLOduration=126.0582908 podStartE2EDuration="2m6.0582908s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:47.008821199 +0000 UTC m=+151.620051693" watchObservedRunningTime="2025-12-13 07:06:47.0582908 +0000 UTC m=+151.669521273" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.063004 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jm9wb"] Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.063967 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.080018 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.111338 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:47 crc kubenswrapper[4733]: E1213 07:06:47.112011 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:47.611996601 +0000 UTC m=+152.223227084 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.122116 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jm9wb"] Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.128408 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7x9q8"] Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.129401 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.136444 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.169979 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7x9q8"] Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.201456 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:47 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:47 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:47 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.201504 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.213017 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.213183 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wphw6\" (UniqueName: \"kubernetes.io/projected/11a45191-a277-4060-8dfc-4c577c6d2702-kube-api-access-wphw6\") pod \"certified-operators-jm9wb\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.213232 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-catalog-content\") pod \"certified-operators-jm9wb\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.213255 4733 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-catalog-content\") pod \"community-operators-7x9q8\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.213301 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-utilities\") pod \"community-operators-7x9q8\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.213323 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t86hj\" (UniqueName: \"kubernetes.io/projected/1f0dfb11-f02d-4007-8275-c92f802f9010-kube-api-access-t86hj\") pod \"community-operators-7x9q8\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.213342 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-utilities\") pod \"certified-operators-jm9wb\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:06:47 crc kubenswrapper[4733]: E1213 07:06:47.213428 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:47.713412768 +0000 UTC m=+152.324643241 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.312723 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-t82l8" podStartSLOduration=127.312707981 podStartE2EDuration="2m7.312707981s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:47.301066528 +0000 UTC m=+151.912297001" watchObservedRunningTime="2025-12-13 07:06:47.312707981 +0000 UTC m=+151.923938454" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.314040 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-catalog-content\") pod \"community-operators-7x9q8\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.314082 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.314132 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-utilities\") pod \"community-operators-7x9q8\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.314164 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t86hj\" (UniqueName: \"kubernetes.io/projected/1f0dfb11-f02d-4007-8275-c92f802f9010-kube-api-access-t86hj\") pod \"community-operators-7x9q8\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.314188 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-utilities\") pod \"certified-operators-jm9wb\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.314254 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wphw6\" (UniqueName: \"kubernetes.io/projected/11a45191-a277-4060-8dfc-4c577c6d2702-kube-api-access-wphw6\") pod \"certified-operators-jm9wb\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 
07:06:47.314283 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-catalog-content\") pod \"certified-operators-jm9wb\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.314573 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-catalog-content\") pod \"community-operators-7x9q8\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:06:47 crc kubenswrapper[4733]: E1213 07:06:47.314578 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:47.814556367 +0000 UTC m=+152.425786920 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.314755 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-catalog-content\") pod \"certified-operators-jm9wb\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.314901 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-utilities\") pod \"certified-operators-jm9wb\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.315110 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-utilities\") pod \"community-operators-7x9q8\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.325882 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-w5cq4"] Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.327312 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.350825 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t86hj\" (UniqueName: \"kubernetes.io/projected/1f0dfb11-f02d-4007-8275-c92f802f9010-kube-api-access-t86hj\") pod \"community-operators-7x9q8\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.369663 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w5cq4"] Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.377951 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" podStartSLOduration=127.37793192 podStartE2EDuration="2m7.37793192s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:47.377491756 +0000 UTC m=+151.988722249" watchObservedRunningTime="2025-12-13 07:06:47.37793192 +0000 UTC m=+151.989162403" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.397720 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wphw6\" (UniqueName: \"kubernetes.io/projected/11a45191-a277-4060-8dfc-4c577c6d2702-kube-api-access-wphw6\") pod \"certified-operators-jm9wb\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.410522 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.410872 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-nlspd" podStartSLOduration=126.410853409 podStartE2EDuration="2m6.410853409s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:47.410704875 +0000 UTC m=+152.021935358" watchObservedRunningTime="2025-12-13 07:06:47.410853409 +0000 UTC m=+152.022083892" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.421894 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.422100 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-utilities\") pod \"certified-operators-w5cq4\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:06:47 crc kubenswrapper[4733]: E1213 07:06:47.422226 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-13 07:06:47.922191303 +0000 UTC m=+152.533421786 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.422377 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-catalog-content\") pod \"certified-operators-w5cq4\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.422566 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brclb\" (UniqueName: \"kubernetes.io/projected/f095c8b8-7de4-40cc-baf2-513801c2cd89-kube-api-access-brclb\") pod \"certified-operators-w5cq4\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.422674 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:47 crc kubenswrapper[4733]: E1213 07:06:47.423127 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:47.923111071 +0000 UTC m=+152.534341554 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.452714 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.503342 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" podStartSLOduration=126.503323765 podStartE2EDuration="2m6.503323765s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:47.501995215 +0000 UTC m=+152.113225698" watchObservedRunningTime="2025-12-13 07:06:47.503323765 +0000 UTC m=+152.114554248" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.521022 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gvt9c"] Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.521996 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.523399 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.523672 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-catalog-content\") pod \"certified-operators-w5cq4\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.523704 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brclb\" (UniqueName: \"kubernetes.io/projected/f095c8b8-7de4-40cc-baf2-513801c2cd89-kube-api-access-brclb\") pod \"certified-operators-w5cq4\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.523731 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-utilities\") pod \"certified-operators-w5cq4\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.524098 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-utilities\") pod \"certified-operators-w5cq4\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:06:47 crc kubenswrapper[4733]: E1213 07:06:47.524163 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:48.024149767 +0000 UTC m=+152.635380250 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.524396 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-catalog-content\") pod \"certified-operators-w5cq4\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.549476 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brclb\" (UniqueName: \"kubernetes.io/projected/f095c8b8-7de4-40cc-baf2-513801c2cd89-kube-api-access-brclb\") pod \"certified-operators-w5cq4\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.552585 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gvt9c"] Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.567400 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tfv8z" podStartSLOduration=127.567378289 podStartE2EDuration="2m7.567378289s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:47.556473988 +0000 UTC m=+152.167704471" watchObservedRunningTime="2025-12-13 07:06:47.567378289 +0000 UTC m=+152.178608792" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.635967 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f57n2\" (UniqueName: \"kubernetes.io/projected/3bc7441a-bd67-4b1e-a799-684eb9e4463d-kube-api-access-f57n2\") pod \"community-operators-gvt9c\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.636052 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.636110 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-utilities\") pod \"community-operators-gvt9c\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.636142 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-catalog-content\") pod \"community-operators-gvt9c\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:06:47 crc kubenswrapper[4733]: E1213 07:06:47.636620 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:48.136604069 +0000 UTC m=+152.747834562 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.647487 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.664677 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wh4vt" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.747756 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.747936 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f57n2\" (UniqueName: \"kubernetes.io/projected/3bc7441a-bd67-4b1e-a799-684eb9e4463d-kube-api-access-f57n2\") pod \"community-operators-gvt9c\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.747988 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-utilities\") pod \"community-operators-gvt9c\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.748004 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-catalog-content\") pod \"community-operators-gvt9c\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.748413 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-catalog-content\") pod \"community-operators-gvt9c\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:06:47 crc kubenswrapper[4733]: E1213 07:06:47.748475 4733 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:48.248461324 +0000 UTC m=+152.859691807 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.748892 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-utilities\") pod \"community-operators-gvt9c\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.799581 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f57n2\" (UniqueName: \"kubernetes.io/projected/3bc7441a-bd67-4b1e-a799-684eb9e4463d-kube-api-access-f57n2\") pod \"community-operators-gvt9c\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.849282 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:47 crc kubenswrapper[4733]: E1213 07:06:47.849563 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:48.349552611 +0000 UTC m=+152.960783094 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.851532 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.898889 4733 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-k5dwn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.898952 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" podUID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.899495 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:47 crc kubenswrapper[4733]: I1213 07:06:47.958722 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:47 crc kubenswrapper[4733]: E1213 07:06:47.959084 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:48.459066605 +0000 UTC m=+153.070297088 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.060680 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:48 crc kubenswrapper[4733]: E1213 07:06:48.066580 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:48.566566757 +0000 UTC m=+153.177797240 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.170955 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:48 crc kubenswrapper[4733]: E1213 07:06:48.171360 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:48.671344166 +0000 UTC m=+153.282574649 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.200034 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:48 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:48 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:48 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.200085 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.278441 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:48 crc kubenswrapper[4733]: E1213 07:06:48.278741 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:48.778730535 +0000 UTC m=+153.389961008 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.380035 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:48 crc kubenswrapper[4733]: E1213 07:06:48.380189 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:48.880158192 +0000 UTC m=+153.491388695 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.380637 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:48 crc kubenswrapper[4733]: E1213 07:06:48.380931 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:48.880919466 +0000 UTC m=+153.492149949 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.413260 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-z8h96" podStartSLOduration=12.413236996 podStartE2EDuration="12.413236996s" podCreationTimestamp="2025-12-13 07:06:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:47.927550538 +0000 UTC m=+152.538781031" watchObservedRunningTime="2025-12-13 07:06:48.413236996 +0000 UTC m=+153.024467479" Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.414672 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jm9wb"] Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.483724 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:48 crc kubenswrapper[4733]: E1213 07:06:48.484042 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:48.984026715 +0000 UTC m=+153.595257198 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.587073 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:48 crc kubenswrapper[4733]: E1213 07:06:48.587465 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:49.087450693 +0000 UTC m=+153.698681176 (durationBeforeRetry 500ms). 
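[Editor's note] The recurring records above show the kubelet's volume reconciler requeueing MountDevice and TearDown operations for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 every 500ms ("No retries permitted until ... durationBeforeRetry 500ms") because the kubevirt.io.hostpath-provisioner CSI driver has not yet registered with the kubelet; once the csi-hostpathplugin pod finishes starting (its ContainerStarted event appears further down), the same operations should succeed. As a rough diagnostic aid, the following is a minimal Go sketch that scans a locally saved copy of this log and counts those requeued attempts per volume. The file name kubelet.log and the matched substrings are assumptions taken from the entries above; this is not part of any kubelet tooling.

    // csiretries.go: count "driver not registered" CSI retries per volume in a
    // saved kubelet log. A sketch under the assumptions stated above.
    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
        "strings"
    )

    func main() {
        f, err := os.Open("kubelet.log") // assumed: local copy of this log
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        defer f.Close()

        // The error detail prints: ... failed for volume "pvc-..." ...
        volRe := regexp.MustCompile(`failed for volume "([^"]+)"`)
        counts := map[string]int{}

        sc := bufio.NewScanner(f)
        sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // some records are long
        for sc.Scan() {
            line := sc.Text()
            if !strings.Contains(line, "not found in the list of registered CSI drivers") {
                continue
            }
            if m := volRe.FindStringSubmatch(line); m != nil {
                counts[m[1]]++
            }
        }
        if err := sc.Err(); err != nil {
            fmt.Fprintln(os.Stderr, err)
        }
        for vol, n := range counts {
            fmt.Printf("%s: %d requeued mount/unmount attempts\n", vol, n)
        }
    }

Run with "go run csiretries.go" next to the saved log; a count that stops growing after the hostpath plugin starts is the expected pattern here.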
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.667494 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7x9q8"] Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.689280 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:48 crc kubenswrapper[4733]: E1213 07:06:48.689883 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:49.189868691 +0000 UTC m=+153.801099174 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.696388 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.696450 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.701789 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w5cq4"] Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.745374 4733 patch_prober.go:28] interesting pod/apiserver-76f77b778f-kzjdj container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 13 07:06:48 crc kubenswrapper[4733]: [+]log ok Dec 13 07:06:48 crc kubenswrapper[4733]: [+]etcd ok Dec 13 07:06:48 crc kubenswrapper[4733]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 13 07:06:48 crc kubenswrapper[4733]: [+]poststarthook/generic-apiserver-start-informers ok Dec 13 07:06:48 crc kubenswrapper[4733]: [+]poststarthook/max-in-flight-filter ok Dec 13 07:06:48 crc kubenswrapper[4733]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 13 07:06:48 crc kubenswrapper[4733]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 13 07:06:48 crc kubenswrapper[4733]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 13 07:06:48 crc kubenswrapper[4733]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 
13 07:06:48 crc kubenswrapper[4733]: [+]poststarthook/project.openshift.io-projectcache ok Dec 13 07:06:48 crc kubenswrapper[4733]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 13 07:06:48 crc kubenswrapper[4733]: [+]poststarthook/openshift.io-startinformers ok Dec 13 07:06:48 crc kubenswrapper[4733]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 13 07:06:48 crc kubenswrapper[4733]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 13 07:06:48 crc kubenswrapper[4733]: livez check failed Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.745425 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" podUID="0cb4ca5e-83af-4dd2-aecf-5759e90f9b77" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.767070 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vsc8k" Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.790754 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:48 crc kubenswrapper[4733]: E1213 07:06:48.791141 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:49.291126674 +0000 UTC m=+153.902357167 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.873415 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gvt9c"] Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.898101 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:48 crc kubenswrapper[4733]: E1213 07:06:48.899751 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:49.39973216 +0000 UTC m=+154.010962643 (durationBeforeRetry 500ms). 
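[Editor's note] The probe output captured above (the "[+]ping ok" / "[-]poststarthook/... failed: reason withheld" lines and the router's "healthz check failed" body) is the response body the kubelet prober read from the container's health endpoint; any "[-]" check forces the endpoint to answer 500 until that check passes, which is why the startup probe is reported as failed even though most checks are ok. Below is a minimal Go sketch that fetches such an endpoint and prints the status plus the per-check body, the same text recorded in these entries. The default URL is only an example lifted from the marketplace-operator probe above and is reachable only from the node or cluster network; nothing here is an official probing tool.

    // healthcheck.go: fetch a pod health endpoint and print status + body,
    // mirroring what the kubelet prober logs above. URL is an assumption.
    package main

    import (
        "fmt"
        "io"
        "net/http"
        "os"
        "time"
    )

    func main() {
        url := "http://10.217.0.35:8080/healthz" // assumed, copied from the probe output above
        if len(os.Args) > 1 {
            url = os.Args[1]
        }
        client := &http.Client{Timeout: 5 * time.Second}
        resp, err := client.Get(url)
        if err != nil {
            // e.g. "connection refused", exactly as reported in the readiness probe records
            fmt.Fprintln(os.Stderr, "probe failed:", err)
            os.Exit(1)
        }
        defer resp.Body.Close()
        body, _ := io.ReadAll(resp.Body)
        fmt.Printf("status=%d\n%s", resp.StatusCode, body)
    }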
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.910879 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" event={"ID":"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995","Type":"ContainerStarted","Data":"222e731623d337f401fac5c7fe1f6e8ac2b925516d57a53350d381bb310ab986"} Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.923489 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5cq4" event={"ID":"f095c8b8-7de4-40cc-baf2-513801c2cd89","Type":"ContainerStarted","Data":"15d116b3249873b88c3d49a117a288812ceab778e0fc949e54b63bd829ad8d09"} Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.929554 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x9q8" event={"ID":"1f0dfb11-f02d-4007-8275-c92f802f9010","Type":"ContainerStarted","Data":"dccd6753a1a470520e0390a156180a2054811a89cb0d9ef73d707311d9a1d325"} Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.930731 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" event={"ID":"0c7fb422-f539-48c4-94c3-02602bb323d6","Type":"ContainerStarted","Data":"27097ffc26dea0eaa3b05565d0a8984e8c3e685f26b13f9f3cc94a3cb40ce456"} Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.931639 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9xkkt"] Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.932703 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.948403 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.948588 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9xkkt"] Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.952632 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" event={"ID":"e4272f28-ff3e-4543-95fc-322a430ef74a","Type":"ContainerStarted","Data":"cc81cd91673233e917be4fdd4fbdd99ec439e6f1a641095e6cb715e54fc721c3"} Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.957111 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jm9wb" event={"ID":"11a45191-a277-4060-8dfc-4c577c6d2702","Type":"ContainerStarted","Data":"8ffab240b6f7f47417ce1d1864f8de114cec46b9ff8c5350c687d066fc4654d5"} Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.957449 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.958145 4733 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-k5dwn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.958169 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" podUID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Dec 13 07:06:48 crc kubenswrapper[4733]: I1213 07:06:48.978607 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-g9699" podStartSLOduration=127.978592713 podStartE2EDuration="2m7.978592713s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:48.977084457 +0000 UTC m=+153.588314940" watchObservedRunningTime="2025-12-13 07:06:48.978592713 +0000 UTC m=+153.589823196" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.010349 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.010912 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:49.510873742 +0000 UTC m=+154.122104225 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.094319 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-975kj" podStartSLOduration=128.094296213 podStartE2EDuration="2m8.094296213s" podCreationTimestamp="2025-12-13 07:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:49.066252942 +0000 UTC m=+153.677483425" watchObservedRunningTime="2025-12-13 07:06:49.094296213 +0000 UTC m=+153.705526706" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.111449 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.111586 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:49.611564618 +0000 UTC m=+154.222795101 (durationBeforeRetry 500ms). 
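[Editor's note] In the pod_startup_latency_tracker entries above, the printed podStartSLOduration equals the difference between podCreationTimestamp and the watchObservedRunningTime shown in the same record (for dns-default-z8h96: 07:06:48.413236996 minus 07:06:36 is 12.413236996s; the multus-admission-controller entry works out the same way). The short Go sketch below just reproduces that arithmetic from the two timestamps as they are printed here; the time layout is an assumption matching the "+0000 UTC" form in the log, and the values are copied from the entries above.

    // startupduration.go: recompute podStartSLOduration from the timestamps
    // printed by the startup-latency tracker above. Values copied from the log.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        created, err := time.Parse(layout, "2025-12-13 07:06:36 +0000 UTC")
        if err != nil {
            panic(err)
        }
        observed, err := time.Parse(layout, "2025-12-13 07:06:48.413236996 +0000 UTC")
        if err != nil {
            panic(err)
        }
        // Prints 12.413236996s, matching podStartSLOduration for dns-default-z8h96.
        fmt.Println("podStartSLOduration =", observed.Sub(created))
    }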
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.111746 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-catalog-content\") pod \"redhat-marketplace-9xkkt\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.115013 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.115566 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-utilities\") pod \"redhat-marketplace-9xkkt\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.115615 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khb94\" (UniqueName: \"kubernetes.io/projected/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-kube-api-access-khb94\") pod \"redhat-marketplace-9xkkt\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.116082 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:49.616068984 +0000 UTC m=+154.227299457 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.194058 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.213394 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:49 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:49 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:49 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.213703 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.216794 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.216969 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-utilities\") pod \"redhat-marketplace-9xkkt\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.216998 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khb94\" (UniqueName: \"kubernetes.io/projected/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-kube-api-access-khb94\") pod \"redhat-marketplace-9xkkt\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.217041 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-catalog-content\") pod \"redhat-marketplace-9xkkt\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.217323 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:49.717296376 +0000 UTC m=+154.328526859 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.217423 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-catalog-content\") pod \"redhat-marketplace-9xkkt\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.218039 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-utilities\") pod \"redhat-marketplace-9xkkt\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.244790 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khb94\" (UniqueName: \"kubernetes.io/projected/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-kube-api-access-khb94\") pod \"redhat-marketplace-9xkkt\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.245292 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.245422 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.270462 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.320485 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.321950 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:49.821938151 +0000 UTC m=+154.433168634 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.327165 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bjj7h"] Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.328246 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.343641 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bjj7h"] Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.421951 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.422785 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:49.922769971 +0000 UTC m=+154.534000454 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.483347 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.492013 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.492081 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.492026 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.492190 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.523116 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlmh7\" (UniqueName: \"kubernetes.io/projected/93072069-7e83-44f8-a145-e4d481eeb3de-kube-api-access-zlmh7\") pod \"redhat-marketplace-bjj7h\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.523165 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.523189 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-catalog-content\") pod \"redhat-marketplace-bjj7h\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.523276 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-utilities\") pod \"redhat-marketplace-bjj7h\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.523499 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:50.023482677 +0000 UTC m=+154.634713160 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.623840 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.624057 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:50.124025538 +0000 UTC m=+154.735256031 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.624426 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-utilities\") pod \"redhat-marketplace-bjj7h\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.624468 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlmh7\" (UniqueName: \"kubernetes.io/projected/93072069-7e83-44f8-a145-e4d481eeb3de-kube-api-access-zlmh7\") pod \"redhat-marketplace-bjj7h\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.624505 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.624524 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-catalog-content\") pod \"redhat-marketplace-bjj7h\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.625000 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-catalog-content\") pod \"redhat-marketplace-bjj7h\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.625019 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:50.124997627 +0000 UTC m=+154.736228110 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.625272 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-utilities\") pod \"redhat-marketplace-bjj7h\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.726246 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.726687 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:50.226669773 +0000 UTC m=+154.837900256 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.726884 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlmh7\" (UniqueName: \"kubernetes.io/projected/93072069-7e83-44f8-a145-e4d481eeb3de-kube-api-access-zlmh7\") pod \"redhat-marketplace-bjj7h\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.827821 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.828398 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:50.32838686 +0000 UTC m=+154.939617343 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.916517 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9xkkt"] Dec 13 07:06:49 crc kubenswrapper[4733]: W1213 07:06:49.926662 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce65638e_1ce3_4625_ad9b_2dd7af81efc4.slice/crio-085de72efb2700882a7dc030c16518c4725ca833a35317ca95bc8842f2700490 WatchSource:0}: Error finding container 085de72efb2700882a7dc030c16518c4725ca833a35317ca95bc8842f2700490: Status 404 returned error can't find the container with id 085de72efb2700882a7dc030c16518c4725ca833a35317ca95bc8842f2700490 Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.929044 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:49 crc kubenswrapper[4733]: E1213 07:06:49.929392 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-13 07:06:50.429376534 +0000 UTC m=+155.040607017 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.964002 4733 generic.go:334] "Generic (PLEG): container finished" podID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerID="34f3670259f0c6e7aa1a697ee327cd41d363a5c8ee2347b772945872c9055d9f" exitCode=0 Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.964064 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvt9c" event={"ID":"3bc7441a-bd67-4b1e-a799-684eb9e4463d","Type":"ContainerDied","Data":"34f3670259f0c6e7aa1a697ee327cd41d363a5c8ee2347b772945872c9055d9f"} Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.964122 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvt9c" event={"ID":"3bc7441a-bd67-4b1e-a799-684eb9e4463d","Type":"ContainerStarted","Data":"16178ef2fea67566a1f1d3be4c2fd25926182b01f7b3f06789f7f25f0ac5213e"} Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.966522 4733 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.968437 4733 generic.go:334] "Generic (PLEG): container finished" podID="11a45191-a277-4060-8dfc-4c577c6d2702" containerID="b504c2a27f416af9922656db91b4a2a3d51c0d2cb43fddd1b3e016f3333cbc6f" exitCode=0 Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.968509 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jm9wb" event={"ID":"11a45191-a277-4060-8dfc-4c577c6d2702","Type":"ContainerDied","Data":"b504c2a27f416af9922656db91b4a2a3d51c0d2cb43fddd1b3e016f3333cbc6f"} Dec 13 07:06:49 crc kubenswrapper[4733]: I1213 07:06:49.984358 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xkkt" event={"ID":"ce65638e-1ce3-4625-ad9b-2dd7af81efc4","Type":"ContainerStarted","Data":"085de72efb2700882a7dc030c16518c4725ca833a35317ca95bc8842f2700490"} Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.013577 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.015728 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.015775 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.018500 4733 generic.go:334] "Generic (PLEG): container finished" podID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerID="2ca5fc06a971e65e40210e14b73aff3c0156601a03f880b4fc51b131d213981e" exitCode=0 Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.018739 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5cq4" event={"ID":"f095c8b8-7de4-40cc-baf2-513801c2cd89","Type":"ContainerDied","Data":"2ca5fc06a971e65e40210e14b73aff3c0156601a03f880b4fc51b131d213981e"} Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.023159 4733 patch_prober.go:28] interesting pod/console-f9d7485db-z4czb container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.6:8443/health\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.023225 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-z4czb" podUID="209b5640-5db3-46dc-8dd1-1a302a385526" containerName="console" probeResult="failure" output="Get \"https://10.217.0.6:8443/health\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.025358 4733 generic.go:334] "Generic (PLEG): container finished" podID="1f0dfb11-f02d-4007-8275-c92f802f9010" containerID="aad10f8986a48fbe7987a89d3fbe1e557736a1cd460f77b083734af000c6b97a" exitCode=0 Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.026510 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x9q8" event={"ID":"1f0dfb11-f02d-4007-8275-c92f802f9010","Type":"ContainerDied","Data":"aad10f8986a48fbe7987a89d3fbe1e557736a1cd460f77b083734af000c6b97a"} Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.030385 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.030708 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:50.530693328 +0000 UTC m=+155.141923811 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.033010 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4vqnd" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.124774 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wglbz"] Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.126035 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.131777 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.132529 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:50.632513638 +0000 UTC m=+155.243744121 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.133015 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.137121 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:50.637107337 +0000 UTC m=+155.248337820 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.144355 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.154566 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wglbz"] Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.194266 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:50 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:50 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:50 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.194333 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.208708 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.233581 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.233854 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-catalog-content\") pod \"redhat-operators-wglbz\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.233884 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-utilities\") pod \"redhat-operators-wglbz\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.233922 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6q8p\" (UniqueName: \"kubernetes.io/projected/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-kube-api-access-b6q8p\") pod \"redhat-operators-wglbz\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.234023 4733 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:50.734009277 +0000 UTC m=+155.345239760 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.342909 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6q8p\" (UniqueName: \"kubernetes.io/projected/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-kube-api-access-b6q8p\") pod \"redhat-operators-wglbz\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.343312 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.343349 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-catalog-content\") pod \"redhat-operators-wglbz\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.343427 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-utilities\") pod \"redhat-operators-wglbz\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.343914 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-utilities\") pod \"redhat-operators-wglbz\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.344681 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:50.844668495 +0000 UTC m=+155.455898988 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.345056 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-catalog-content\") pod \"redhat-operators-wglbz\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.374120 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6q8p\" (UniqueName: \"kubernetes.io/projected/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-kube-api-access-b6q8p\") pod \"redhat-operators-wglbz\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.444984 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.445385 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:50.945364631 +0000 UTC m=+155.556595114 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.465637 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.527262 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wqqsg"] Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.528621 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.529784 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wqqsg"] Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.536838 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bjj7h"] Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.546562 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.546948 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.046933613 +0000 UTC m=+155.658164096 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: W1213 07:06:50.551422 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93072069_7e83_44f8_a145_e4d481eeb3de.slice/crio-dcd79a0f431fd8be81af3b3e47c675d7589cd28aa8e46aa2da2b651cda2f1fac WatchSource:0}: Error finding container dcd79a0f431fd8be81af3b3e47c675d7589cd28aa8e46aa2da2b651cda2f1fac: Status 404 returned error can't find the container with id dcd79a0f431fd8be81af3b3e47c675d7589cd28aa8e46aa2da2b651cda2f1fac Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.647967 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.648147 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.148118133 +0000 UTC m=+155.759348616 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.648264 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-catalog-content\") pod \"redhat-operators-wqqsg\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.648347 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.648381 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-utilities\") pod \"redhat-operators-wqqsg\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.648417 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqrz2\" (UniqueName: \"kubernetes.io/projected/3851778b-0b71-4b8f-b562-0d8f9bb86e75-kube-api-access-wqrz2\") pod \"redhat-operators-wqqsg\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.648798 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.148782614 +0000 UTC m=+155.760013097 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.685369 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wglbz"] Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.749800 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.749991 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-catalog-content\") pod \"redhat-operators-wqqsg\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.750016 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.249984265 +0000 UTC m=+155.861214748 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.750245 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.750285 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-utilities\") pod \"redhat-operators-wqqsg\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.750346 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqrz2\" (UniqueName: \"kubernetes.io/projected/3851778b-0b71-4b8f-b562-0d8f9bb86e75-kube-api-access-wqrz2\") pod \"redhat-operators-wqqsg\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.750487 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-catalog-content\") pod \"redhat-operators-wqqsg\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.750783 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-utilities\") pod \"redhat-operators-wqqsg\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.751100 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.251087688 +0000 UTC m=+155.862318171 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.768639 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqrz2\" (UniqueName: \"kubernetes.io/projected/3851778b-0b71-4b8f-b562-0d8f9bb86e75-kube-api-access-wqrz2\") pod \"redhat-operators-wqqsg\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.848843 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.851802 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.852157 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.352143115 +0000 UTC m=+155.963373598 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:50 crc kubenswrapper[4733]: I1213 07:06:50.953806 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:50 crc kubenswrapper[4733]: E1213 07:06:50.954245 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.454228172 +0000 UTC m=+156.065458655 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.055435 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.055631 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.555605478 +0000 UTC m=+156.166835961 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.055677 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.056013 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.556005671 +0000 UTC m=+156.167236234 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.156722 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.156902 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.656880962 +0000 UTC m=+156.268111445 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.156938 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.157257 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.657246943 +0000 UTC m=+156.268477426 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.193382 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:51 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:51 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:51 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.193440 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.257651 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.257789 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.757772464 +0000 UTC m=+156.369002947 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.258039 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.258394 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.758384262 +0000 UTC m=+156.369614755 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: W1213 07:06:51.298124 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13d8b178_e08a_43ec_a5ed_5a3c8816f7f7.slice/crio-d15d67abaf3226e947ca9b7c6ea993db89714ce449c76d8c9e3600df6832f962 WatchSource:0}: Error finding container d15d67abaf3226e947ca9b7c6ea993db89714ce449c76d8c9e3600df6832f962: Status 404 returned error can't find the container with id d15d67abaf3226e947ca9b7c6ea993db89714ce449c76d8c9e3600df6832f962 Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.317116 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjj7h" event={"ID":"93072069-7e83-44f8-a145-e4d481eeb3de","Type":"ContainerStarted","Data":"dcd79a0f431fd8be81af3b3e47c675d7589cd28aa8e46aa2da2b651cda2f1fac"} Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.326848 4733 generic.go:334] "Generic (PLEG): container finished" podID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" containerID="c4c1dc62247b1828e9f21a233ae28ccd95944940fd05f0524a54ae7d0e8466c2" exitCode=0 Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.327116 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xkkt" event={"ID":"ce65638e-1ce3-4625-ad9b-2dd7af81efc4","Type":"ContainerDied","Data":"c4c1dc62247b1828e9f21a233ae28ccd95944940fd05f0524a54ae7d0e8466c2"} Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.359160 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.359926 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.859904003 +0000 UTC m=+156.471134496 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.420696 4733 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.461700 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.462819 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:51.962803335 +0000 UTC m=+156.574033818 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.562663 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.562992 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:52.062977615 +0000 UTC m=+156.674208098 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.664267 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.664724 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:52.164699601 +0000 UTC m=+156.775930084 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.711052 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wqqsg"] Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.768958 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.769414 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-13 07:06:52.269396788 +0000 UTC m=+156.880627271 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.873364 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.873788 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-13 07:06:52.373774136 +0000 UTC m=+156.985004619 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-278t5" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.957732 4733 patch_prober.go:28] interesting pod/machine-config-daemon-d4w5t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.957807 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.960339 4733 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-13T07:06:51.420983126Z","Handler":null,"Name":""} Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.974470 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:51 crc kubenswrapper[4733]: E1213 07:06:51.974575 4733 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-13 07:06:52.474545364 +0000 UTC m=+157.085775857 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.974765 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.974993 4733 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 13 07:06:51 crc kubenswrapper[4733]: I1213 07:06:51.975055 4733 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.197428 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:52 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:52 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:52 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.197816 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.354595 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wqqsg" event={"ID":"3851778b-0b71-4b8f-b562-0d8f9bb86e75","Type":"ContainerStarted","Data":"8d161174b2c7859e3bfa1c6662977c5a4122bc5bcbd739dbfa38e4846e898ec1"} Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.370510 4733 generic.go:334] "Generic (PLEG): container finished" podID="93072069-7e83-44f8-a145-e4d481eeb3de" containerID="1b9442d98c86b3f3d03cdabbfe2bc2d426b5950babb38dd833f0df515ffdfe33" exitCode=0 Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.370794 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjj7h" event={"ID":"93072069-7e83-44f8-a145-e4d481eeb3de","Type":"ContainerDied","Data":"1b9442d98c86b3f3d03cdabbfe2bc2d426b5950babb38dd833f0df515ffdfe33"} Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.374355 4733 generic.go:334] "Generic (PLEG): container finished" podID="e263441b-77f1-4f99-b1ba-419525173cf0" containerID="4dd4ceb24837bd449c425fe3d677d270c085405258ec1022b6a044ba6e60fa3e" exitCode=0 Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.374415 4733 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" event={"ID":"e263441b-77f1-4f99-b1ba-419525173cf0","Type":"ContainerDied","Data":"4dd4ceb24837bd449c425fe3d677d270c085405258ec1022b6a044ba6e60fa3e"} Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.379906 4733 generic.go:334] "Generic (PLEG): container finished" podID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" containerID="b25e0aeb5e6ebb9f8c2f44c494ad45d2d475fc9bb328547e3d7633428ec8d75c" exitCode=0 Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.379981 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wglbz" event={"ID":"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7","Type":"ContainerDied","Data":"b25e0aeb5e6ebb9f8c2f44c494ad45d2d475fc9bb328547e3d7633428ec8d75c"} Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.380016 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wglbz" event={"ID":"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7","Type":"ContainerStarted","Data":"d15d67abaf3226e947ca9b7c6ea993db89714ce449c76d8c9e3600df6832f962"} Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.385472 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" event={"ID":"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995","Type":"ContainerStarted","Data":"7a4835fa798c57cecacfb44615f7e316c9ac8fff01836757b043cab8f39b6c03"} Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.385640 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" event={"ID":"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995","Type":"ContainerStarted","Data":"2e5061588fec679e2f625429f885a1057698331cc5fad51dd35c4ed67d673f02"} Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.675196 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.678777 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.681816 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.687700 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.688638 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.725788 4733 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.725855 4733 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.785891 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-278t5\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.791029 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.791386 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5596cd7-0096-4c45-bfaa-28fb6467b457-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a5596cd7-0096-4c45-bfaa-28fb6467b457\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.791496 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5596cd7-0096-4c45-bfaa-28fb6467b457-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a5596cd7-0096-4c45-bfaa-28fb6467b457\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.803797 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.895161 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5596cd7-0096-4c45-bfaa-28fb6467b457-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a5596cd7-0096-4c45-bfaa-28fb6467b457\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.895567 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5596cd7-0096-4c45-bfaa-28fb6467b457-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a5596cd7-0096-4c45-bfaa-28fb6467b457\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.895266 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5596cd7-0096-4c45-bfaa-28fb6467b457-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a5596cd7-0096-4c45-bfaa-28fb6467b457\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.925076 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:52 crc kubenswrapper[4733]: I1213 07:06:52.969587 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5596cd7-0096-4c45-bfaa-28fb6467b457-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a5596cd7-0096-4c45-bfaa-28fb6467b457\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.020070 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.194186 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:53 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:53 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:53 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.194557 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.400739 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-278t5"] Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.410551 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" event={"ID":"45abc2f0-f3bd-462e-8b4a-9c6ed7c30995","Type":"ContainerStarted","Data":"04846d41b51a63f5671663f12b3b1b3d64c544feafe9ff55ccaf4643c84d3642"} Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.412894 4733 generic.go:334] "Generic (PLEG): container finished" podID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerID="a0622b90f7cc58aacc2305072384d7926feb7214caa0c738f94161774a191b7c" exitCode=0 Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.413756 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wqqsg" event={"ID":"3851778b-0b71-4b8f-b562-0d8f9bb86e75","Type":"ContainerDied","Data":"a0622b90f7cc58aacc2305072384d7926feb7214caa0c738f94161774a191b7c"} Dec 13 07:06:53 crc kubenswrapper[4733]: W1213 07:06:53.422878 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64178ade_8a25_4622_91a4_d09ae57fadf1.slice/crio-d5eae76ba1613800ac612f57c8b8281e3138813337b0f7ac7756d921c2e90c56 WatchSource:0}: Error finding container d5eae76ba1613800ac612f57c8b8281e3138813337b0f7ac7756d921c2e90c56: Status 404 returned error can't find the container with id d5eae76ba1613800ac612f57c8b8281e3138813337b0f7ac7756d921c2e90c56 Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.443761 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" podStartSLOduration=17.443744356 podStartE2EDuration="17.443744356s" podCreationTimestamp="2025-12-13 07:06:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:53.440825147 +0000 UTC m=+158.052055640" watchObservedRunningTime="2025-12-13 07:06:53.443744356 +0000 UTC m=+158.054974849" Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.447660 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 13 07:06:53 crc kubenswrapper[4733]: W1213 07:06:53.500331 4733 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-poda5596cd7_0096_4c45_bfaa_28fb6467b457.slice/crio-a13a6c355991f038c1fe3efde71891b5ff15abd20d239bdd9956d96f0bd3e16f WatchSource:0}: Error finding container a13a6c355991f038c1fe3efde71891b5ff15abd20d239bdd9956d96f0bd3e16f: Status 404 returned error can't find the container with id a13a6c355991f038c1fe3efde71891b5ff15abd20d239bdd9956d96f0bd3e16f Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.622513 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.695700 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.706466 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-kzjdj" Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.738133 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.922467 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e263441b-77f1-4f99-b1ba-419525173cf0-config-volume\") pod \"e263441b-77f1-4f99-b1ba-419525173cf0\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.922675 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e263441b-77f1-4f99-b1ba-419525173cf0-secret-volume\") pod \"e263441b-77f1-4f99-b1ba-419525173cf0\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.922722 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x75z\" (UniqueName: \"kubernetes.io/projected/e263441b-77f1-4f99-b1ba-419525173cf0-kube-api-access-8x75z\") pod \"e263441b-77f1-4f99-b1ba-419525173cf0\" (UID: \"e263441b-77f1-4f99-b1ba-419525173cf0\") " Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.930680 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e263441b-77f1-4f99-b1ba-419525173cf0-config-volume" (OuterVolumeSpecName: "config-volume") pod "e263441b-77f1-4f99-b1ba-419525173cf0" (UID: "e263441b-77f1-4f99-b1ba-419525173cf0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.934702 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e263441b-77f1-4f99-b1ba-419525173cf0-kube-api-access-8x75z" (OuterVolumeSpecName: "kube-api-access-8x75z") pod "e263441b-77f1-4f99-b1ba-419525173cf0" (UID: "e263441b-77f1-4f99-b1ba-419525173cf0"). InnerVolumeSpecName "kube-api-access-8x75z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:06:53 crc kubenswrapper[4733]: I1213 07:06:53.942233 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e263441b-77f1-4f99-b1ba-419525173cf0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e263441b-77f1-4f99-b1ba-419525173cf0" (UID: "e263441b-77f1-4f99-b1ba-419525173cf0"). 
InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.032362 4733 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e263441b-77f1-4f99-b1ba-419525173cf0-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.032421 4733 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e263441b-77f1-4f99-b1ba-419525173cf0-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.032434 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x75z\" (UniqueName: \"kubernetes.io/projected/e263441b-77f1-4f99-b1ba-419525173cf0-kube-api-access-8x75z\") on node \"crc\" DevicePath \"\"" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.211293 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:54 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:54 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:54 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.211377 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.271731 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 13 07:06:54 crc kubenswrapper[4733]: E1213 07:06:54.272451 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e263441b-77f1-4f99-b1ba-419525173cf0" containerName="collect-profiles" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.272477 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="e263441b-77f1-4f99-b1ba-419525173cf0" containerName="collect-profiles" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.272614 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="e263441b-77f1-4f99-b1ba-419525173cf0" containerName="collect-profiles" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.273170 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.274558 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.279977 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.280811 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.447011 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.447075 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.498577 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" event={"ID":"e263441b-77f1-4f99-b1ba-419525173cf0","Type":"ContainerDied","Data":"e57eade9bcfeaaaaf2dbf15cd0e9c550ed82636cb8aa3195ac9e4bdcc46f4e9c"} Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.498619 4733 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e57eade9bcfeaaaaf2dbf15cd0e9c550ed82636cb8aa3195ac9e4bdcc46f4e9c" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.498708 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426820-srlnz" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.504932 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" event={"ID":"64178ade-8a25-4622-91a4-d09ae57fadf1","Type":"ContainerStarted","Data":"5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8"} Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.504983 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" event={"ID":"64178ade-8a25-4622-91a4-d09ae57fadf1","Type":"ContainerStarted","Data":"d5eae76ba1613800ac612f57c8b8281e3138813337b0f7ac7756d921c2e90c56"} Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.505789 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.512477 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a5596cd7-0096-4c45-bfaa-28fb6467b457","Type":"ContainerStarted","Data":"f94beca4a7e34d965d5b7107a6dd59e284491cb40f2a0bc2b85444649c369a31"} Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.512506 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a5596cd7-0096-4c45-bfaa-28fb6467b457","Type":"ContainerStarted","Data":"a13a6c355991f038c1fe3efde71891b5ff15abd20d239bdd9956d96f0bd3e16f"} Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.548370 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.548421 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.548489 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.556834 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.556804782 podStartE2EDuration="2.556804782s" podCreationTimestamp="2025-12-13 07:06:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:54.552882292 +0000 UTC m=+159.164112775" watchObservedRunningTime="2025-12-13 07:06:54.556804782 +0000 UTC m=+159.168035265" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.557839 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-image-registry/image-registry-697d97f7c8-278t5" podStartSLOduration=134.557832143 podStartE2EDuration="2m14.557832143s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:54.533136043 +0000 UTC m=+159.144366546" watchObservedRunningTime="2025-12-13 07:06:54.557832143 +0000 UTC m=+159.169062626" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.583240 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 07:06:54 crc kubenswrapper[4733]: I1213 07:06:54.614743 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 07:06:55 crc kubenswrapper[4733]: I1213 07:06:55.194376 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:55 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:55 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:55 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:55 crc kubenswrapper[4733]: I1213 07:06:55.194721 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:55 crc kubenswrapper[4733]: I1213 07:06:55.288491 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-z8h96" Dec 13 07:06:55 crc kubenswrapper[4733]: I1213 07:06:55.322525 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 13 07:06:55 crc kubenswrapper[4733]: W1213 07:06:55.461665 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod5a7d4459_92b8_44fb_b3f0_e7c5c8e0836d.slice/crio-54c3368aa813c77b2b2e3616ebdd13e48f388d4d014fb60c44496d7a73c6f3d4 WatchSource:0}: Error finding container 54c3368aa813c77b2b2e3616ebdd13e48f388d4d014fb60c44496d7a73c6f3d4: Status 404 returned error can't find the container with id 54c3368aa813c77b2b2e3616ebdd13e48f388d4d014fb60c44496d7a73c6f3d4 Dec 13 07:06:55 crc kubenswrapper[4733]: I1213 07:06:55.607094 4733 generic.go:334] "Generic (PLEG): container finished" podID="a5596cd7-0096-4c45-bfaa-28fb6467b457" containerID="f94beca4a7e34d965d5b7107a6dd59e284491cb40f2a0bc2b85444649c369a31" exitCode=0 Dec 13 07:06:55 crc kubenswrapper[4733]: I1213 07:06:55.636917 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a5596cd7-0096-4c45-bfaa-28fb6467b457","Type":"ContainerDied","Data":"f94beca4a7e34d965d5b7107a6dd59e284491cb40f2a0bc2b85444649c369a31"} Dec 13 07:06:55 crc kubenswrapper[4733]: I1213 07:06:55.636957 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" 
event={"ID":"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d","Type":"ContainerStarted","Data":"54c3368aa813c77b2b2e3616ebdd13e48f388d4d014fb60c44496d7a73c6f3d4"} Dec 13 07:06:56 crc kubenswrapper[4733]: I1213 07:06:56.207389 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:56 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:56 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:56 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:56 crc kubenswrapper[4733]: I1213 07:06:56.207470 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:56 crc kubenswrapper[4733]: I1213 07:06:56.647334 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d","Type":"ContainerStarted","Data":"24f01b7ba14f5e3d121bf06c078f6991a7a88a45545b7a603a6adf310e434228"} Dec 13 07:06:56 crc kubenswrapper[4733]: I1213 07:06:56.677953 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.677936336 podStartE2EDuration="2.677936336s" podCreationTimestamp="2025-12-13 07:06:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:06:56.675653396 +0000 UTC m=+161.286883879" watchObservedRunningTime="2025-12-13 07:06:56.677936336 +0000 UTC m=+161.289166819" Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.072114 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.197123 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:57 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:57 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:57 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.197185 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.233619 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5596cd7-0096-4c45-bfaa-28fb6467b457-kube-api-access\") pod \"a5596cd7-0096-4c45-bfaa-28fb6467b457\" (UID: \"a5596cd7-0096-4c45-bfaa-28fb6467b457\") " Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.233865 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5596cd7-0096-4c45-bfaa-28fb6467b457-kubelet-dir\") pod \"a5596cd7-0096-4c45-bfaa-28fb6467b457\" (UID: \"a5596cd7-0096-4c45-bfaa-28fb6467b457\") " Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.234471 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a5596cd7-0096-4c45-bfaa-28fb6467b457-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a5596cd7-0096-4c45-bfaa-28fb6467b457" (UID: "a5596cd7-0096-4c45-bfaa-28fb6467b457"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.260603 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5596cd7-0096-4c45-bfaa-28fb6467b457-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a5596cd7-0096-4c45-bfaa-28fb6467b457" (UID: "a5596cd7-0096-4c45-bfaa-28fb6467b457"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.335382 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5596cd7-0096-4c45-bfaa-28fb6467b457-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.335417 4733 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5596cd7-0096-4c45-bfaa-28fb6467b457-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.696734 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.697452 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a5596cd7-0096-4c45-bfaa-28fb6467b457","Type":"ContainerDied","Data":"a13a6c355991f038c1fe3efde71891b5ff15abd20d239bdd9956d96f0bd3e16f"} Dec 13 07:06:57 crc kubenswrapper[4733]: I1213 07:06:57.697481 4733 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a13a6c355991f038c1fe3efde71891b5ff15abd20d239bdd9956d96f0bd3e16f" Dec 13 07:06:58 crc kubenswrapper[4733]: I1213 07:06:58.194832 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:58 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:58 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:58 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:58 crc kubenswrapper[4733]: I1213 07:06:58.194904 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:58 crc kubenswrapper[4733]: I1213 07:06:58.712586 4733 generic.go:334] "Generic (PLEG): container finished" podID="5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d" containerID="24f01b7ba14f5e3d121bf06c078f6991a7a88a45545b7a603a6adf310e434228" exitCode=0 Dec 13 07:06:58 crc kubenswrapper[4733]: I1213 07:06:58.712669 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d","Type":"ContainerDied","Data":"24f01b7ba14f5e3d121bf06c078f6991a7a88a45545b7a603a6adf310e434228"} Dec 13 07:06:59 crc kubenswrapper[4733]: I1213 07:06:59.195083 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:06:59 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:06:59 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:06:59 crc kubenswrapper[4733]: healthz check failed Dec 13 07:06:59 crc kubenswrapper[4733]: I1213 07:06:59.195143 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:06:59 crc kubenswrapper[4733]: I1213 07:06:59.496040 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:06:59 crc kubenswrapper[4733]: I1213 07:06:59.496090 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:06:59 crc 
kubenswrapper[4733]: I1213 07:06:59.496106 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:06:59 crc kubenswrapper[4733]: I1213 07:06:59.496150 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:06:59 crc kubenswrapper[4733]: I1213 07:06:59.543755 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="hostpath-provisioner/csi-hostpathplugin-8sw6n" podUID="45abc2f0-f3bd-462e-8b4a-9c6ed7c30995" containerName="hostpath-provisioner" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.016598 4733 patch_prober.go:28] interesting pod/console-f9d7485db-z4czb container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.6:8443/health\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.017054 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-z4czb" podUID="209b5640-5db3-46dc-8dd1-1a302a385526" containerName="console" probeResult="failure" output="Get \"https://10.217.0.6:8443/health\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.118957 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.194579 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:07:00 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:07:00 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:07:00 crc kubenswrapper[4733]: healthz check failed Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.194678 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.249320 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kube-api-access\") pod \"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d\" (UID: \"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d\") " Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.249434 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kubelet-dir\") pod \"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d\" (UID: \"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d\") " Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.249932 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d" (UID: "5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.262921 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d" (UID: "5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.351342 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.351377 4733 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.735485 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d","Type":"ContainerDied","Data":"54c3368aa813c77b2b2e3616ebdd13e48f388d4d014fb60c44496d7a73c6f3d4"} Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.735853 4733 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54c3368aa813c77b2b2e3616ebdd13e48f388d4d014fb60c44496d7a73c6f3d4" Dec 13 07:07:00 crc kubenswrapper[4733]: I1213 07:07:00.735939 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 13 07:07:01 crc kubenswrapper[4733]: I1213 07:07:01.197758 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:07:01 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:07:01 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:07:01 crc kubenswrapper[4733]: healthz check failed Dec 13 07:07:01 crc kubenswrapper[4733]: I1213 07:07:01.197826 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:07:02 crc kubenswrapper[4733]: I1213 07:07:02.199058 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:07:02 crc kubenswrapper[4733]: [-]has-synced failed: reason withheld Dec 13 07:07:02 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:07:02 crc kubenswrapper[4733]: healthz check failed Dec 13 07:07:02 crc kubenswrapper[4733]: I1213 07:07:02.199118 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:07:02 crc kubenswrapper[4733]: I1213 07:07:02.914309 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:07:02 crc kubenswrapper[4733]: I1213 07:07:02.934229 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/868ca71b-6aae-4054-8b03-cf7c2b3499cd-metrics-certs\") pod \"network-metrics-daemon-r5dfg\" (UID: \"868ca71b-6aae-4054-8b03-cf7c2b3499cd\") " pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:07:03 crc kubenswrapper[4733]: I1213 07:07:03.006950 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r5dfg" Dec 13 07:07:03 crc kubenswrapper[4733]: I1213 07:07:03.196799 4733 patch_prober.go:28] interesting pod/router-default-5444994796-2qw2l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 13 07:07:03 crc kubenswrapper[4733]: [+]has-synced ok Dec 13 07:07:03 crc kubenswrapper[4733]: [+]process-running ok Dec 13 07:07:03 crc kubenswrapper[4733]: healthz check failed Dec 13 07:07:03 crc kubenswrapper[4733]: I1213 07:07:03.198766 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2qw2l" podUID="bbe545cc-6362-4ade-b7aa-65e5fa4892e2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 13 07:07:03 crc kubenswrapper[4733]: I1213 07:07:03.481896 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-r5dfg"] Dec 13 07:07:03 crc kubenswrapper[4733]: I1213 07:07:03.768725 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" event={"ID":"868ca71b-6aae-4054-8b03-cf7c2b3499cd","Type":"ContainerStarted","Data":"234f0ef30c6a70cd4463dc96468af4b4cc0e793418fd428bc19423be3ebefab5"} Dec 13 07:07:04 crc kubenswrapper[4733]: I1213 07:07:04.203140 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:07:04 crc kubenswrapper[4733]: I1213 07:07:04.208936 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-2qw2l" Dec 13 07:07:09 crc kubenswrapper[4733]: I1213 07:07:09.491917 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:07:09 crc kubenswrapper[4733]: I1213 07:07:09.492354 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:07:09 crc kubenswrapper[4733]: I1213 07:07:09.492445 4733 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-r59jx" Dec 13 07:07:09 crc kubenswrapper[4733]: I1213 07:07:09.492548 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:07:09 crc kubenswrapper[4733]: I1213 07:07:09.492617 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" 
containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:07:09 crc kubenswrapper[4733]: I1213 07:07:09.493363 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:07:09 crc kubenswrapper[4733]: I1213 07:07:09.493406 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:07:09 crc kubenswrapper[4733]: I1213 07:07:09.493425 4733 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"7ae14e5d5bc5074711bbe6bb993210157ced36506e8d2b4f3c803f43a360bed5"} pod="openshift-console/downloads-7954f5f757-r59jx" containerMessage="Container download-server failed liveness probe, will be restarted" Dec 13 07:07:09 crc kubenswrapper[4733]: I1213 07:07:09.493529 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" containerID="cri-o://7ae14e5d5bc5074711bbe6bb993210157ced36506e8d2b4f3c803f43a360bed5" gracePeriod=2 Dec 13 07:07:09 crc kubenswrapper[4733]: I1213 07:07:09.938091 4733 generic.go:334] "Generic (PLEG): container finished" podID="02e53589-ca3a-41cc-be79-94b5453bf411" containerID="7ae14e5d5bc5074711bbe6bb993210157ced36506e8d2b4f3c803f43a360bed5" exitCode=0 Dec 13 07:07:09 crc kubenswrapper[4733]: I1213 07:07:09.938290 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-r59jx" event={"ID":"02e53589-ca3a-41cc-be79-94b5453bf411","Type":"ContainerDied","Data":"7ae14e5d5bc5074711bbe6bb993210157ced36506e8d2b4f3c803f43a360bed5"} Dec 13 07:07:10 crc kubenswrapper[4733]: I1213 07:07:10.053256 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:07:10 crc kubenswrapper[4733]: I1213 07:07:10.058082 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-z4czb" Dec 13 07:07:12 crc kubenswrapper[4733]: I1213 07:07:12.931141 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:07:13 crc kubenswrapper[4733]: I1213 07:07:13.974810 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" event={"ID":"868ca71b-6aae-4054-8b03-cf7c2b3499cd","Type":"ContainerStarted","Data":"d11ade535158ce08759ec324e600e98b5e562630fa75886d5866d70be4bef61f"} Dec 13 07:07:19 crc kubenswrapper[4733]: I1213 07:07:19.492668 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:07:19 crc kubenswrapper[4733]: I1213 07:07:19.493511 4733 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:07:20 crc kubenswrapper[4733]: I1213 07:07:20.472005 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qcmlb" Dec 13 07:07:21 crc kubenswrapper[4733]: I1213 07:07:21.957827 4733 patch_prober.go:28] interesting pod/machine-config-daemon-d4w5t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:07:21 crc kubenswrapper[4733]: I1213 07:07:21.957881 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:07:22 crc kubenswrapper[4733]: I1213 07:07:22.350838 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.435786 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 13 07:07:29 crc kubenswrapper[4733]: E1213 07:07:29.436407 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d" containerName="pruner" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.436419 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d" containerName="pruner" Dec 13 07:07:29 crc kubenswrapper[4733]: E1213 07:07:29.436427 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5596cd7-0096-4c45-bfaa-28fb6467b457" containerName="pruner" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.436433 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5596cd7-0096-4c45-bfaa-28fb6467b457" containerName="pruner" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.436535 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a7d4459-92b8-44fb-b3f0-e7c5c8e0836d" containerName="pruner" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.436547 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5596cd7-0096-4c45-bfaa-28fb6467b457" containerName="pruner" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.436917 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.439791 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.440083 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.453020 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.492253 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.492314 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.519903 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9ab2258-9990-4736-9383-da8b295a4bd5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c9ab2258-9990-4736-9383-da8b295a4bd5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.520203 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9ab2258-9990-4736-9383-da8b295a4bd5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c9ab2258-9990-4736-9383-da8b295a4bd5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.621831 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9ab2258-9990-4736-9383-da8b295a4bd5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c9ab2258-9990-4736-9383-da8b295a4bd5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.621929 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9ab2258-9990-4736-9383-da8b295a4bd5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c9ab2258-9990-4736-9383-da8b295a4bd5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.622006 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9ab2258-9990-4736-9383-da8b295a4bd5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c9ab2258-9990-4736-9383-da8b295a4bd5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.642926 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9ab2258-9990-4736-9383-da8b295a4bd5-kube-api-access\") pod 
\"revision-pruner-9-crc\" (UID: \"c9ab2258-9990-4736-9383-da8b295a4bd5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 07:07:29 crc kubenswrapper[4733]: I1213 07:07:29.764474 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 07:07:32 crc kubenswrapper[4733]: E1213 07:07:32.627412 4733 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 13 07:07:32 crc kubenswrapper[4733]: E1213 07:07:32.627960 4733 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f57n2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-gvt9c_openshift-marketplace(3bc7441a-bd67-4b1e-a799-684eb9e4463d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 07:07:32 crc kubenswrapper[4733]: E1213 07:07:32.629123 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-gvt9c" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.639064 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.640070 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.647408 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.675522 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-var-lock\") pod \"installer-9-crc\" (UID: \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.675579 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kubelet-dir\") pod \"installer-9-crc\" (UID: \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.675602 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kube-api-access\") pod \"installer-9-crc\" (UID: \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.777365 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-var-lock\") pod \"installer-9-crc\" (UID: \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.777431 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kubelet-dir\") pod \"installer-9-crc\" (UID: \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.777459 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kube-api-access\") pod \"installer-9-crc\" (UID: \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.777864 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-var-lock\") pod \"installer-9-crc\" (UID: \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.777912 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kubelet-dir\") pod \"installer-9-crc\" (UID: \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.811015 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:07:33 crc kubenswrapper[4733]: I1213 07:07:33.963250 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:07:35 crc kubenswrapper[4733]: E1213 07:07:35.080716 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-gvt9c" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" Dec 13 07:07:35 crc kubenswrapper[4733]: E1213 07:07:35.118145 4733 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 13 07:07:35 crc kubenswrapper[4733]: E1213 07:07:35.118340 4733 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t86hj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-7x9q8_openshift-marketplace(1f0dfb11-f02d-4007-8275-c92f802f9010): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 07:07:35 crc kubenswrapper[4733]: E1213 07:07:35.119551 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-7x9q8" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" Dec 13 07:07:35 crc kubenswrapper[4733]: E1213 07:07:35.190633 4733 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 13 07:07:35 crc kubenswrapper[4733]: E1213 07:07:35.191058 4733 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wphw6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-jm9wb_openshift-marketplace(11a45191-a277-4060-8dfc-4c577c6d2702): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 07:07:35 crc kubenswrapper[4733]: E1213 07:07:35.192157 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-jm9wb" podUID="11a45191-a277-4060-8dfc-4c577c6d2702" Dec 13 07:07:35 crc kubenswrapper[4733]: E1213 07:07:35.216743 4733 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 13 07:07:35 crc kubenswrapper[4733]: E1213 07:07:35.216923 4733 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-brclb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-w5cq4_openshift-marketplace(f095c8b8-7de4-40cc-baf2-513801c2cd89): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 07:07:35 crc kubenswrapper[4733]: E1213 07:07:35.218112 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-w5cq4" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" Dec 13 07:07:36 crc kubenswrapper[4733]: E1213 07:07:36.619122 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7x9q8" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" Dec 13 07:07:36 crc kubenswrapper[4733]: E1213 07:07:36.619445 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-jm9wb" podUID="11a45191-a277-4060-8dfc-4c577c6d2702" Dec 13 07:07:36 crc kubenswrapper[4733]: E1213 07:07:36.620261 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-w5cq4" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" Dec 13 07:07:37 crc kubenswrapper[4733]: E1213 07:07:37.026181 4733 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 13 07:07:37 crc kubenswrapper[4733]: E1213 07:07:37.026350 4733 kuberuntime_manager.go:1274] 
"Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-khb94,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-9xkkt_openshift-marketplace(ce65638e-1ce3-4625-ad9b-2dd7af81efc4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 07:07:37 crc kubenswrapper[4733]: E1213 07:07:37.027487 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-9xkkt" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" Dec 13 07:07:37 crc kubenswrapper[4733]: E1213 07:07:37.173363 4733 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 13 07:07:37 crc kubenswrapper[4733]: E1213 07:07:37.173715 4733 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zlmh7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-bjj7h_openshift-marketplace(93072069-7e83-44f8-a145-e4d481eeb3de): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 07:07:37 crc kubenswrapper[4733]: E1213 07:07:37.174835 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-bjj7h" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" Dec 13 07:07:39 crc kubenswrapper[4733]: I1213 07:07:39.492310 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:07:39 crc kubenswrapper[4733]: I1213 07:07:39.492776 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:07:40 crc kubenswrapper[4733]: E1213 07:07:40.092437 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-bjj7h" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" Dec 13 07:07:40 crc kubenswrapper[4733]: E1213 07:07:40.092433 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-9xkkt" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" Dec 13 07:07:40 crc kubenswrapper[4733]: E1213 07:07:40.176757 4733 log.go:32] "PullImage from image 
service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 13 07:07:40 crc kubenswrapper[4733]: E1213 07:07:40.177140 4733 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wqrz2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-wqqsg_openshift-marketplace(3851778b-0b71-4b8f-b562-0d8f9bb86e75): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 07:07:40 crc kubenswrapper[4733]: E1213 07:07:40.178339 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-wqqsg" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" Dec 13 07:07:40 crc kubenswrapper[4733]: E1213 07:07:40.210376 4733 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 13 07:07:40 crc kubenswrapper[4733]: E1213 07:07:40.210547 4733 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b6q8p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-wglbz_openshift-marketplace(13d8b178-e08a-43ec-a5ed-5a3c8816f7f7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 13 07:07:40 crc kubenswrapper[4733]: E1213 07:07:40.211687 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-wglbz" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" Dec 13 07:07:40 crc kubenswrapper[4733]: I1213 07:07:40.440955 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 13 07:07:40 crc kubenswrapper[4733]: I1213 07:07:40.539706 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.128448 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634","Type":"ContainerStarted","Data":"fd8793258dad01c22a21ff586444e072cee01ec176ebf40b754b8afa38d9ccb3"} Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.128514 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634","Type":"ContainerStarted","Data":"54c1c15bcc390af186cb7749e6f43588ec539a8c01545e2aa644bb7a2db0e69a"} Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.131496 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-r59jx" event={"ID":"02e53589-ca3a-41cc-be79-94b5453bf411","Type":"ContainerStarted","Data":"aa560254608c49e0d3ea41daebe5c16d32c906c3a3da03b1cd73df30bf07f64d"} Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.132668 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-r59jx" Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.132777 4733 patch_prober.go:28] interesting 
pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.132820 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.139904 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-r5dfg" event={"ID":"868ca71b-6aae-4054-8b03-cf7c2b3499cd","Type":"ContainerStarted","Data":"aa7f727d208722cfb51a5905a459577c5d41e0e245cb998b985738aa9bed4267"} Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.146010 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c9ab2258-9990-4736-9383-da8b295a4bd5","Type":"ContainerStarted","Data":"801d9a558da33ad21cedaad5a94507ac0a7c460f2d45e79141b71c33f5981498"} Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.146052 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c9ab2258-9990-4736-9383-da8b295a4bd5","Type":"ContainerStarted","Data":"76013e3cf4a85ab72fdd91b50430075d326d8ae349d0c40be0889ba228d206fc"} Dec 13 07:07:41 crc kubenswrapper[4733]: E1213 07:07:41.147973 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-wqqsg" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.149490 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=8.149471815 podStartE2EDuration="8.149471815s" podCreationTimestamp="2025-12-13 07:07:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:07:41.145803683 +0000 UTC m=+205.757034166" watchObservedRunningTime="2025-12-13 07:07:41.149471815 +0000 UTC m=+205.760702318" Dec 13 07:07:41 crc kubenswrapper[4733]: E1213 07:07:41.149747 4733 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-wglbz" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.198858 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-r5dfg" podStartSLOduration=181.198836174 podStartE2EDuration="3m1.198836174s" podCreationTimestamp="2025-12-13 07:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:07:41.19615851 +0000 UTC m=+205.807388993" watchObservedRunningTime="2025-12-13 07:07:41.198836174 +0000 UTC m=+205.810066657" Dec 13 07:07:41 crc kubenswrapper[4733]: I1213 07:07:41.209805 4733 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=12.209783578 podStartE2EDuration="12.209783578s" podCreationTimestamp="2025-12-13 07:07:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:07:41.208228485 +0000 UTC m=+205.819458978" watchObservedRunningTime="2025-12-13 07:07:41.209783578 +0000 UTC m=+205.821014061" Dec 13 07:07:42 crc kubenswrapper[4733]: I1213 07:07:42.152554 4733 generic.go:334] "Generic (PLEG): container finished" podID="c9ab2258-9990-4736-9383-da8b295a4bd5" containerID="801d9a558da33ad21cedaad5a94507ac0a7c460f2d45e79141b71c33f5981498" exitCode=0 Dec 13 07:07:42 crc kubenswrapper[4733]: I1213 07:07:42.152630 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c9ab2258-9990-4736-9383-da8b295a4bd5","Type":"ContainerDied","Data":"801d9a558da33ad21cedaad5a94507ac0a7c460f2d45e79141b71c33f5981498"} Dec 13 07:07:42 crc kubenswrapper[4733]: I1213 07:07:42.154371 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:07:42 crc kubenswrapper[4733]: I1213 07:07:42.154433 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:07:43 crc kubenswrapper[4733]: I1213 07:07:43.160486 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:07:43 crc kubenswrapper[4733]: I1213 07:07:43.160987 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:07:43 crc kubenswrapper[4733]: I1213 07:07:43.856296 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 07:07:44 crc kubenswrapper[4733]: I1213 07:07:44.010897 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9ab2258-9990-4736-9383-da8b295a4bd5-kube-api-access\") pod \"c9ab2258-9990-4736-9383-da8b295a4bd5\" (UID: \"c9ab2258-9990-4736-9383-da8b295a4bd5\") " Dec 13 07:07:44 crc kubenswrapper[4733]: I1213 07:07:44.010989 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9ab2258-9990-4736-9383-da8b295a4bd5-kubelet-dir\") pod \"c9ab2258-9990-4736-9383-da8b295a4bd5\" (UID: \"c9ab2258-9990-4736-9383-da8b295a4bd5\") " Dec 13 07:07:44 crc kubenswrapper[4733]: I1213 07:07:44.011322 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c9ab2258-9990-4736-9383-da8b295a4bd5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c9ab2258-9990-4736-9383-da8b295a4bd5" (UID: "c9ab2258-9990-4736-9383-da8b295a4bd5"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:07:44 crc kubenswrapper[4733]: I1213 07:07:44.060408 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9ab2258-9990-4736-9383-da8b295a4bd5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c9ab2258-9990-4736-9383-da8b295a4bd5" (UID: "c9ab2258-9990-4736-9383-da8b295a4bd5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:07:44 crc kubenswrapper[4733]: I1213 07:07:44.112539 4733 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9ab2258-9990-4736-9383-da8b295a4bd5-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 07:07:44 crc kubenswrapper[4733]: I1213 07:07:44.112576 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9ab2258-9990-4736-9383-da8b295a4bd5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 07:07:44 crc kubenswrapper[4733]: I1213 07:07:44.167059 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c9ab2258-9990-4736-9383-da8b295a4bd5","Type":"ContainerDied","Data":"76013e3cf4a85ab72fdd91b50430075d326d8ae349d0c40be0889ba228d206fc"} Dec 13 07:07:44 crc kubenswrapper[4733]: I1213 07:07:44.167106 4733 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76013e3cf4a85ab72fdd91b50430075d326d8ae349d0c40be0889ba228d206fc" Dec 13 07:07:44 crc kubenswrapper[4733]: I1213 07:07:44.167172 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 13 07:07:49 crc kubenswrapper[4733]: I1213 07:07:49.491558 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:07:49 crc kubenswrapper[4733]: I1213 07:07:49.493465 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:07:49 crc kubenswrapper[4733]: I1213 07:07:49.491873 4733 patch_prober.go:28] interesting pod/downloads-7954f5f757-r59jx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 13 07:07:49 crc kubenswrapper[4733]: I1213 07:07:49.493623 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-r59jx" podUID="02e53589-ca3a-41cc-be79-94b5453bf411" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 13 07:07:51 crc kubenswrapper[4733]: I1213 07:07:51.957288 4733 patch_prober.go:28] interesting pod/machine-config-daemon-d4w5t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:07:51 crc kubenswrapper[4733]: I1213 07:07:51.957687 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:07:52 crc kubenswrapper[4733]: I1213 07:07:52.023890 4733 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:07:52 crc kubenswrapper[4733]: I1213 07:07:52.024492 4733 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c"} pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 07:07:52 crc kubenswrapper[4733]: I1213 07:07:52.024567 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" containerID="cri-o://e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c" gracePeriod=600 Dec 13 07:07:53 crc kubenswrapper[4733]: I1213 07:07:53.030991 4733 generic.go:334] "Generic (PLEG): container finished" podID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerID="e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c" exitCode=0 Dec 13 07:07:53 crc kubenswrapper[4733]: I1213 
07:07:53.031078 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" event={"ID":"fa175c9e-efff-4835-8b54-c98307e46f0b","Type":"ContainerDied","Data":"e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c"} Dec 13 07:07:54 crc kubenswrapper[4733]: I1213 07:07:54.045929 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" event={"ID":"fa175c9e-efff-4835-8b54-c98307e46f0b","Type":"ContainerStarted","Data":"3dbb50ccd329147a33d48adc785b7031a5cf7ea0a4b7160ebd43f87c292b72dd"} Dec 13 07:07:57 crc kubenswrapper[4733]: I1213 07:07:57.126805 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjj7h" event={"ID":"93072069-7e83-44f8-a145-e4d481eeb3de","Type":"ContainerStarted","Data":"7f4a8e117e727376d131bf415389f69f1a0e61808b7394d62f68daf6a58a9d20"} Dec 13 07:07:57 crc kubenswrapper[4733]: I1213 07:07:57.129014 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvt9c" event={"ID":"3bc7441a-bd67-4b1e-a799-684eb9e4463d","Type":"ContainerStarted","Data":"15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7"} Dec 13 07:07:57 crc kubenswrapper[4733]: I1213 07:07:57.141308 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jm9wb" event={"ID":"11a45191-a277-4060-8dfc-4c577c6d2702","Type":"ContainerStarted","Data":"6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e"} Dec 13 07:07:57 crc kubenswrapper[4733]: I1213 07:07:57.147248 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5cq4" event={"ID":"f095c8b8-7de4-40cc-baf2-513801c2cd89","Type":"ContainerStarted","Data":"034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b"} Dec 13 07:07:57 crc kubenswrapper[4733]: I1213 07:07:57.149517 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x9q8" event={"ID":"1f0dfb11-f02d-4007-8275-c92f802f9010","Type":"ContainerStarted","Data":"8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b"} Dec 13 07:07:58 crc kubenswrapper[4733]: I1213 07:07:58.307333 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xkkt" event={"ID":"ce65638e-1ce3-4625-ad9b-2dd7af81efc4","Type":"ContainerStarted","Data":"4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707"} Dec 13 07:07:58 crc kubenswrapper[4733]: I1213 07:07:58.354910 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wglbz" event={"ID":"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7","Type":"ContainerStarted","Data":"0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4"} Dec 13 07:07:58 crc kubenswrapper[4733]: I1213 07:07:58.377849 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wqqsg" event={"ID":"3851778b-0b71-4b8f-b562-0d8f9bb86e75","Type":"ContainerStarted","Data":"38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c"} Dec 13 07:07:59 crc kubenswrapper[4733]: I1213 07:07:59.447017 4733 generic.go:334] "Generic (PLEG): container finished" podID="93072069-7e83-44f8-a145-e4d481eeb3de" containerID="7f4a8e117e727376d131bf415389f69f1a0e61808b7394d62f68daf6a58a9d20" exitCode=0 Dec 13 07:07:59 crc kubenswrapper[4733]: I1213 07:07:59.447441 4733 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjj7h" event={"ID":"93072069-7e83-44f8-a145-e4d481eeb3de","Type":"ContainerDied","Data":"7f4a8e117e727376d131bf415389f69f1a0e61808b7394d62f68daf6a58a9d20"} Dec 13 07:07:59 crc kubenswrapper[4733]: I1213 07:07:59.529938 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-r59jx" Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.464942 4733 generic.go:334] "Generic (PLEG): container finished" podID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerID="38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c" exitCode=0 Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.465084 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wqqsg" event={"ID":"3851778b-0b71-4b8f-b562-0d8f9bb86e75","Type":"ContainerDied","Data":"38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c"} Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.467908 4733 generic.go:334] "Generic (PLEG): container finished" podID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerID="034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b" exitCode=0 Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.467991 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5cq4" event={"ID":"f095c8b8-7de4-40cc-baf2-513801c2cd89","Type":"ContainerDied","Data":"034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b"} Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.474274 4733 generic.go:334] "Generic (PLEG): container finished" podID="1f0dfb11-f02d-4007-8275-c92f802f9010" containerID="8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b" exitCode=0 Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.474356 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x9q8" event={"ID":"1f0dfb11-f02d-4007-8275-c92f802f9010","Type":"ContainerDied","Data":"8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b"} Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.476732 4733 generic.go:334] "Generic (PLEG): container finished" podID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerID="15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7" exitCode=0 Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.476785 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvt9c" event={"ID":"3bc7441a-bd67-4b1e-a799-684eb9e4463d","Type":"ContainerDied","Data":"15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7"} Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.482486 4733 generic.go:334] "Generic (PLEG): container finished" podID="11a45191-a277-4060-8dfc-4c577c6d2702" containerID="6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e" exitCode=0 Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.482555 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jm9wb" event={"ID":"11a45191-a277-4060-8dfc-4c577c6d2702","Type":"ContainerDied","Data":"6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e"} Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.490977 4733 generic.go:334] "Generic (PLEG): container finished" podID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" containerID="4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707" 
exitCode=0 Dec 13 07:08:02 crc kubenswrapper[4733]: I1213 07:08:02.491024 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xkkt" event={"ID":"ce65638e-1ce3-4625-ad9b-2dd7af81efc4","Type":"ContainerDied","Data":"4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707"} Dec 13 07:08:03 crc kubenswrapper[4733]: I1213 07:08:03.496722 4733 generic.go:334] "Generic (PLEG): container finished" podID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" containerID="0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4" exitCode=0 Dec 13 07:08:03 crc kubenswrapper[4733]: I1213 07:08:03.496764 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wglbz" event={"ID":"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7","Type":"ContainerDied","Data":"0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4"} Dec 13 07:08:08 crc kubenswrapper[4733]: I1213 07:08:08.524454 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjj7h" event={"ID":"93072069-7e83-44f8-a145-e4d481eeb3de","Type":"ContainerStarted","Data":"75706bce4a34e4d55d681f11e6cb58673ef10c61f36209f15a261517120bde55"} Dec 13 07:08:09 crc kubenswrapper[4733]: I1213 07:08:09.561713 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bjj7h" podStartSLOduration=5.958517887 podStartE2EDuration="1m20.561698066s" podCreationTimestamp="2025-12-13 07:06:49 +0000 UTC" firstStartedPulling="2025-12-13 07:06:52.375889662 +0000 UTC m=+156.987120145" lastFinishedPulling="2025-12-13 07:08:06.979069821 +0000 UTC m=+231.590300324" observedRunningTime="2025-12-13 07:08:09.558649101 +0000 UTC m=+234.169879584" watchObservedRunningTime="2025-12-13 07:08:09.561698066 +0000 UTC m=+234.172928539" Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.017498 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.017897 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.538319 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wqqsg" event={"ID":"3851778b-0b71-4b8f-b562-0d8f9bb86e75","Type":"ContainerStarted","Data":"7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f"} Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.541643 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5cq4" event={"ID":"f095c8b8-7de4-40cc-baf2-513801c2cd89","Type":"ContainerStarted","Data":"f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436"} Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.544346 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x9q8" event={"ID":"1f0dfb11-f02d-4007-8275-c92f802f9010","Type":"ContainerStarted","Data":"8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7"} Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.546450 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvt9c" event={"ID":"3bc7441a-bd67-4b1e-a799-684eb9e4463d","Type":"ContainerStarted","Data":"6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0"} Dec 13 07:08:10 crc 
kubenswrapper[4733]: I1213 07:08:10.548692 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jm9wb" event={"ID":"11a45191-a277-4060-8dfc-4c577c6d2702","Type":"ContainerStarted","Data":"949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962"} Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.550773 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xkkt" event={"ID":"ce65638e-1ce3-4625-ad9b-2dd7af81efc4","Type":"ContainerStarted","Data":"3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d"} Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.552991 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wglbz" event={"ID":"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7","Type":"ContainerStarted","Data":"eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a"} Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.597847 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wqqsg" podStartSLOduration=4.656548875 podStartE2EDuration="1m20.597827968s" podCreationTimestamp="2025-12-13 07:06:50 +0000 UTC" firstStartedPulling="2025-12-13 07:06:53.418362156 +0000 UTC m=+158.029592649" lastFinishedPulling="2025-12-13 07:08:09.359641259 +0000 UTC m=+233.970871742" observedRunningTime="2025-12-13 07:08:10.595335788 +0000 UTC m=+235.206566281" watchObservedRunningTime="2025-12-13 07:08:10.597827968 +0000 UTC m=+235.209058451" Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.625534 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7x9q8" podStartSLOduration=3.791006016 podStartE2EDuration="1m23.625512806s" podCreationTimestamp="2025-12-13 07:06:47 +0000 UTC" firstStartedPulling="2025-12-13 07:06:50.04986332 +0000 UTC m=+154.661093803" lastFinishedPulling="2025-12-13 07:08:09.88437011 +0000 UTC m=+234.495600593" observedRunningTime="2025-12-13 07:08:10.62132575 +0000 UTC m=+235.232556243" watchObservedRunningTime="2025-12-13 07:08:10.625512806 +0000 UTC m=+235.236743289" Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.646127 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gvt9c" podStartSLOduration=3.972617929 podStartE2EDuration="1m23.646108628s" podCreationTimestamp="2025-12-13 07:06:47 +0000 UTC" firstStartedPulling="2025-12-13 07:06:49.96616631 +0000 UTC m=+154.577396793" lastFinishedPulling="2025-12-13 07:08:09.639657009 +0000 UTC m=+234.250887492" observedRunningTime="2025-12-13 07:08:10.64474743 +0000 UTC m=+235.255977913" watchObservedRunningTime="2025-12-13 07:08:10.646108628 +0000 UTC m=+235.257339111" Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.671347 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jm9wb" podStartSLOduration=4.048961024 podStartE2EDuration="1m23.671329608s" podCreationTimestamp="2025-12-13 07:06:47 +0000 UTC" firstStartedPulling="2025-12-13 07:06:49.976343709 +0000 UTC m=+154.587574202" lastFinishedPulling="2025-12-13 07:08:09.598712303 +0000 UTC m=+234.209942786" observedRunningTime="2025-12-13 07:08:10.668363216 +0000 UTC m=+235.279593729" watchObservedRunningTime="2025-12-13 07:08:10.671329608 +0000 UTC m=+235.282560091" Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.686640 4733 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-w5cq4" podStartSLOduration=4.072216018 podStartE2EDuration="1m23.686620092s" podCreationTimestamp="2025-12-13 07:06:47 +0000 UTC" firstStartedPulling="2025-12-13 07:06:50.026390137 +0000 UTC m=+154.637620620" lastFinishedPulling="2025-12-13 07:08:09.640794211 +0000 UTC m=+234.252024694" observedRunningTime="2025-12-13 07:08:10.685247234 +0000 UTC m=+235.296477727" watchObservedRunningTime="2025-12-13 07:08:10.686620092 +0000 UTC m=+235.297850575" Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.706945 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9xkkt" podStartSLOduration=4.687615327 podStartE2EDuration="1m22.706919395s" podCreationTimestamp="2025-12-13 07:06:48 +0000 UTC" firstStartedPulling="2025-12-13 07:06:51.341153423 +0000 UTC m=+155.952383906" lastFinishedPulling="2025-12-13 07:08:09.360457491 +0000 UTC m=+233.971687974" observedRunningTime="2025-12-13 07:08:10.703128229 +0000 UTC m=+235.314358722" watchObservedRunningTime="2025-12-13 07:08:10.706919395 +0000 UTC m=+235.318149878" Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.727538 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wglbz" podStartSLOduration=3.471972103 podStartE2EDuration="1m20.727502236s" podCreationTimestamp="2025-12-13 07:06:50 +0000 UTC" firstStartedPulling="2025-12-13 07:06:52.385822743 +0000 UTC m=+156.997053226" lastFinishedPulling="2025-12-13 07:08:09.641352876 +0000 UTC m=+234.252583359" observedRunningTime="2025-12-13 07:08:10.724399571 +0000 UTC m=+235.335630064" watchObservedRunningTime="2025-12-13 07:08:10.727502236 +0000 UTC m=+235.338732719" Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.850011 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:08:10 crc kubenswrapper[4733]: I1213 07:08:10.850096 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:08:11 crc kubenswrapper[4733]: I1213 07:08:11.427585 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-bjj7h" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" containerName="registry-server" probeResult="failure" output=< Dec 13 07:08:11 crc kubenswrapper[4733]: timeout: failed to connect service ":50051" within 1s Dec 13 07:08:11 crc kubenswrapper[4733]: > Dec 13 07:08:11 crc kubenswrapper[4733]: I1213 07:08:11.946410 4733 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wqqsg" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerName="registry-server" probeResult="failure" output=< Dec 13 07:08:11 crc kubenswrapper[4733]: timeout: failed to connect service ":50051" within 1s Dec 13 07:08:11 crc kubenswrapper[4733]: > Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.410824 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.411225 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.453408 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.458814 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.648570 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.648677 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.852551 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.852603 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.990681 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.991981 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.995023 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:08:17 crc kubenswrapper[4733]: I1213 07:08:17.996039 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.058061 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.435752 4733 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 07:08:18 crc kubenswrapper[4733]: E1213 07:08:18.437014 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9ab2258-9990-4736-9383-da8b295a4bd5" containerName="pruner" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.437107 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9ab2258-9990-4736-9383-da8b295a4bd5" containerName="pruner" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.437371 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9ab2258-9990-4736-9383-da8b295a4bd5" containerName="pruner" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.438091 4733 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.438754 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.448152 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5" gracePeriod=15 Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.448509 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269" gracePeriod=15 Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.448670 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07" gracePeriod=15 Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.448727 4733 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 07:08:18 crc kubenswrapper[4733]: E1213 07:08:18.449231 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.449438 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.449189 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c" gracePeriod=15 Dec 13 07:08:18 crc kubenswrapper[4733]: E1213 07:08:18.449534 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.449707 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 13 07:08:18 crc kubenswrapper[4733]: E1213 07:08:18.449809 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.449895 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.449361 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7" gracePeriod=15 Dec 13 07:08:18 crc kubenswrapper[4733]: E1213 07:08:18.467088 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 07:08:18 crc 
kubenswrapper[4733]: I1213 07:08:18.467271 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 07:08:18 crc kubenswrapper[4733]: E1213 07:08:18.467369 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.467451 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 07:08:18 crc kubenswrapper[4733]: E1213 07:08:18.467544 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.467629 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 13 07:08:18 crc kubenswrapper[4733]: E1213 07:08:18.467715 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.467789 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.468146 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.468256 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.468330 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.468410 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.468495 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.468821 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.506436 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.610630 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.612054 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.612836 4733 generic.go:334] "Generic (PLEG): container finished" 
podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c" exitCode=2 Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.619979 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.620051 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.620100 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.620128 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.620151 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.620196 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.620295 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.620318 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.672368 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.673192 4733 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.673421 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.673739 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.675387 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.675845 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.676266 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.676521 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.676764 4733 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.680760 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.681297 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.681744 4733 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.682032 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.682288 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.682515 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.721251 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.721299 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.721417 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.721446 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.721487 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod 
\"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.721623 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.721713 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.721733 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.721753 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.721779 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.722067 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.722182 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.722246 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.722252 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.722331 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.721506 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: I1213 07:08:18.801103 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:08:18 crc kubenswrapper[4733]: W1213 07:08:18.823711 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-bd2a5f15b3438e7da8fe474c30c388ba09a2bb0fd1ab788e0d5da0a852cd4954 WatchSource:0}: Error finding container bd2a5f15b3438e7da8fe474c30c388ba09a2bb0fd1ab788e0d5da0a852cd4954: Status 404 returned error can't find the container with id bd2a5f15b3438e7da8fe474c30c388ba09a2bb0fd1ab788e0d5da0a852cd4954 Dec 13 07:08:18 crc kubenswrapper[4733]: E1213 07:08:18.827170 4733 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.41:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1880b4aa9e8ab858 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 07:08:18.82656572 +0000 UTC m=+243.437796193,LastTimestamp:2025-12-13 07:08:18.82656572 +0000 UTC m=+243.437796193,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.484513 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.485504 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.533862 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.534507 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.534917 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.535202 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.535543 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.535786 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.536022 4733 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.618991 4733 generic.go:334] "Generic (PLEG): container finished" podID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" containerID="fd8793258dad01c22a21ff586444e072cee01ec176ebf40b754b8afa38d9ccb3" exitCode=0 Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.619062 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634","Type":"ContainerDied","Data":"fd8793258dad01c22a21ff586444e072cee01ec176ebf40b754b8afa38d9ccb3"} Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.619844 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.620066 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 
38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.620305 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.620499 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.621127 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.621253 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b"} Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.621287 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"bd2a5f15b3438e7da8fe474c30c388ba09a2bb0fd1ab788e0d5da0a852cd4954"} Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.621543 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.623199 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.624541 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.625285 4733 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269" exitCode=0 Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.625310 4733 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07" exitCode=0 Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.625319 4733 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7" exitCode=0 Dec 13 
07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.625357 4733 scope.go:117] "RemoveContainer" containerID="e17d53f9071a28579ad26fe65b586bb20466ca03c0ce2e484d8b32aff2f2d294" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.662953 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.663635 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.663885 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.664166 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.664445 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.664690 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:19 crc kubenswrapper[4733]: I1213 07:08:19.664928 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.073581 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.074312 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.074997 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" 
pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.075418 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.075912 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.076472 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.076830 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.077281 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.115373 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.116241 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.116650 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.117222 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 
38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.117864 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.118242 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.118704 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.118935 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.466461 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.466507 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.506232 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.506917 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.507204 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.507392 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.507559 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" 
pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.507853 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.508168 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.508482 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.509165 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.636747 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.636983 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.637170 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.637379 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.649654 4733 status_manager.go:851] "Failed to get status for pod" 
podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.650007 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.663588 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.665575 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.769304 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.769979 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.770634 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.770944 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.771163 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.771422 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.771628 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.771818 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.772009 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.898385 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.907431 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.907953 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.908300 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.908572 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.908726 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 
07:08:20.908871 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.909060 4733 status_manager.go:851] "Failed to get status for pod" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.909222 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.910001 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.947049 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.947829 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.948488 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.948872 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.949116 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.949409 4733 status_manager.go:851] "Failed to get status for pod" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.949803 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.950030 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.950597 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:20 crc kubenswrapper[4733]: I1213 07:08:20.950922 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.000077 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.000801 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.001509 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.002090 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.002457 4733 status_manager.go:851] "Failed to get status for pod" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.002772 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.003048 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.003422 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.003847 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.004100 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.163102 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kube-api-access\") pod \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\" (UID: \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.163159 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kubelet-dir\") pod \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\" (UID: \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.163230 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-var-lock\") pod \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\" (UID: \"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634\") " Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.163486 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-var-lock" (OuterVolumeSpecName: "var-lock") pod "e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" (UID: "e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.163913 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" (UID: "e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.169035 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" (UID: "e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.264605 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.264664 4733 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.264682 4733 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634-var-lock\") on node \"crc\" DevicePath \"\"" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.644474 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.647256 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634","Type":"ContainerDied","Data":"54c1c15bcc390af186cb7749e6f43588ec539a8c01545e2aa644bb7a2db0e69a"} Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.647403 4733 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54c1c15bcc390af186cb7749e6f43588ec539a8c01545e2aa644bb7a2db0e69a" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.647340 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.652011 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.652368 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.652567 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.652748 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.652907 4733 status_manager.go:851] "Failed to get status for pod" 
podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.653051 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.653187 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.653694 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:21 crc kubenswrapper[4733]: I1213 07:08:21.653937 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: E1213 07:08:22.060042 4733 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: E1213 07:08:22.060506 4733 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: E1213 07:08:22.060874 4733 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: E1213 07:08:22.061127 4733 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: E1213 07:08:22.061403 4733 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.061435 4733 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 13 07:08:22 crc 
kubenswrapper[4733]: E1213 07:08:22.061634 4733 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="200ms" Dec 13 07:08:22 crc kubenswrapper[4733]: E1213 07:08:22.263054 4733 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="400ms" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.657838 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.658641 4733 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5" exitCode=0 Dec 13 07:08:22 crc kubenswrapper[4733]: E1213 07:08:22.690967 4733 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="800ms" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.757972 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.759193 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.760115 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.760671 4733 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.761176 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.761609 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.761926 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.762242 4733 status_manager.go:851] "Failed to get status for pod" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.762530 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.762827 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.763090 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.763389 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.893684 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.893758 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.893805 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.893825 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.893871 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.893945 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.894153 4733 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.894169 4733 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 13 07:08:22 crc kubenswrapper[4733]: I1213 07:08:22.894179 4733 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 13 07:08:23 crc kubenswrapper[4733]: E1213 07:08:23.492116 4733 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="1.6s" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.603716 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.668745 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.670816 4733 scope.go:117] "RemoveContainer" containerID="de9268a9799b559e6326e45e33f3d3f66d17ebd033a313664aa8a5552d8ae269" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.670959 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.672498 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.672925 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.674264 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.674513 4733 status_manager.go:851] "Failed to get status for pod" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.674727 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.674914 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.675135 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.675375 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.675568 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.675751 4733 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.676068 4733 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.676319 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.676779 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.677006 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.677480 4733 status_manager.go:851] "Failed to get status for pod" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.678232 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.678589 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.679040 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.679665 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.679927 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.690364 4733 scope.go:117] "RemoveContainer" containerID="e74f13b533a7bf9a8b0a25e74c55ed10898823442977d48ca00cfc6d0ce27c07" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.706596 4733 scope.go:117] "RemoveContainer" containerID="27f5230ee21ec7c2008649bbb77bb7829ac0a8c759a3d7a5b93d7d253dcbfda7" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.725077 4733 scope.go:117] "RemoveContainer" containerID="fb217e30c173c7222967166845a89cd8ec202e3a7bc420b01b1fb783fd55e78c" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.740520 4733 scope.go:117] "RemoveContainer" containerID="c14d64c839e652fe3a8e44edf45662f0227bd0e6a6d5dfb3bd50fcef7ba9d0f5" Dec 13 07:08:23 crc kubenswrapper[4733]: I1213 07:08:23.755153 4733 scope.go:117] "RemoveContainer" containerID="354d22132ae78ea027b7abb28540e09702bed54ddd65513c087b9ff357d9a8db" Dec 13 07:08:25 crc kubenswrapper[4733]: E1213 07:08:25.093167 4733 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="3.2s" Dec 13 07:08:25 crc kubenswrapper[4733]: I1213 07:08:25.598376 4733 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:25 crc kubenswrapper[4733]: I1213 07:08:25.599051 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:25 crc kubenswrapper[4733]: I1213 07:08:25.599459 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:25 crc kubenswrapper[4733]: I1213 07:08:25.599777 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:25 crc kubenswrapper[4733]: I1213 07:08:25.600225 4733 status_manager.go:851] "Failed to get status for pod" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:25 crc kubenswrapper[4733]: I1213 07:08:25.600704 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:25 crc kubenswrapper[4733]: I1213 07:08:25.601161 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:25 crc kubenswrapper[4733]: I1213 07:08:25.601467 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:25 crc kubenswrapper[4733]: I1213 07:08:25.601754 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:25 crc kubenswrapper[4733]: I1213 07:08:25.602059 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:26 crc kubenswrapper[4733]: E1213 07:08:26.264485 4733 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.41:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1880b4aa9e8ab858 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on 
machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-13 07:08:18.82656572 +0000 UTC m=+243.437796193,LastTimestamp:2025-12-13 07:08:18.82656572 +0000 UTC m=+243.437796193,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 13 07:08:28 crc kubenswrapper[4733]: E1213 07:08:28.294018 4733 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.41:6443: connect: connection refused" interval="6.4s" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.593910 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.595536 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.596073 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.596649 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.596968 4733 status_manager.go:851] "Failed to get status for pod" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.597244 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.597493 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.597861 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.598552 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.598894 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.616059 4733 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="17831d74-d1c0-4b8e-aa94-83f240384afb" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.616106 4733 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="17831d74-d1c0-4b8e-aa94-83f240384afb" Dec 13 07:08:31 crc kubenswrapper[4733]: E1213 07:08:31.616573 4733 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.616911 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:31 crc kubenswrapper[4733]: W1213 07:08:31.646460 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-1375b26b2e0bfcbc05f705591c98f106fa0ffe7a5a92d64979dc7050d6b4c697 WatchSource:0}: Error finding container 1375b26b2e0bfcbc05f705591c98f106fa0ffe7a5a92d64979dc7050d6b4c697: Status 404 returned error can't find the container with id 1375b26b2e0bfcbc05f705591c98f106fa0ffe7a5a92d64979dc7050d6b4c697 Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.719319 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1375b26b2e0bfcbc05f705591c98f106fa0ffe7a5a92d64979dc7050d6b4c697"} Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.727837 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.727884 4733 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d" exitCode=1 Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.727913 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d"} Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.728789 4733 scope.go:117] "RemoveContainer" containerID="d5f2da3765b22f8b2284bc5d5c4e9b1fd642cb45d19c5c6d7807a5002d01a65d" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.729467 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.729958 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.730416 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.730762 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc 
kubenswrapper[4733]: I1213 07:08:31.731004 4733 status_manager.go:851] "Failed to get status for pod" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.731211 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.731433 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.731640 4733 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.731898 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:31 crc kubenswrapper[4733]: I1213 07:08:31.732086 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.736858 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.737500 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"207cc1e1393f200a1ecd95c00fc7a0452c4f225064c94adab51aa02b27a6d1db"} Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.738484 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.739027 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" 
pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.739298 4733 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="43385fb1c6c49488b02d641a2bfc3cc0e5704dffab2b0b282d5f8bd6c7ca9f1c" exitCode=0 Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.739342 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"43385fb1c6c49488b02d641a2bfc3cc0e5704dffab2b0b282d5f8bd6c7ca9f1c"} Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.739672 4733 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="17831d74-d1c0-4b8e-aa94-83f240384afb" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.739705 4733 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="17831d74-d1c0-4b8e-aa94-83f240384afb" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.739741 4733 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: E1213 07:08:32.740049 4733 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.740112 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.740611 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.741016 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.741259 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" 
Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.741579 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.742088 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.742415 4733 status_manager.go:851] "Failed to get status for pod" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.742778 4733 status_manager.go:851] "Failed to get status for pod" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" pod="openshift-marketplace/redhat-operators-wglbz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wglbz\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.743511 4733 status_manager.go:851] "Failed to get status for pod" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" pod="openshift-marketplace/redhat-marketplace-bjj7h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-bjj7h\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.743741 4733 status_manager.go:851] "Failed to get status for pod" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.745525 4733 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.746057 4733 status_manager.go:851] "Failed to get status for pod" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" pod="openshift-marketplace/redhat-operators-wqqsg" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-wqqsg\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.746382 4733 status_manager.go:851] "Failed to get status for pod" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" pod="openshift-marketplace/community-operators-7x9q8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x9q8\": dial tcp 38.102.83.41:6443: connect: 
connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.746870 4733 status_manager.go:851] "Failed to get status for pod" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" pod="openshift-marketplace/community-operators-gvt9c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gvt9c\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.747188 4733 status_manager.go:851] "Failed to get status for pod" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" pod="openshift-marketplace/certified-operators-w5cq4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-w5cq4\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.747551 4733 status_manager.go:851] "Failed to get status for pod" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" pod="openshift-marketplace/redhat-marketplace-9xkkt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-9xkkt\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.747923 4733 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.41:6443: connect: connection refused" Dec 13 07:08:32 crc kubenswrapper[4733]: I1213 07:08:32.872495 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:08:33 crc kubenswrapper[4733]: I1213 07:08:33.751737 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f7f4bf571c3a214dbb31008c2602cad0c1f97ecf5a7873647cacb147d7e78ca5"} Dec 13 07:08:33 crc kubenswrapper[4733]: I1213 07:08:33.752132 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b73f046d7714f608fc879ea219254f9c27804a116d30a0e198f265ee8cc30035"} Dec 13 07:08:33 crc kubenswrapper[4733]: I1213 07:08:33.752145 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"40dea7aaa5063715aad9914a65677a1a48218fa8c99079054396a15dd577af56"} Dec 13 07:08:33 crc kubenswrapper[4733]: I1213 07:08:33.752155 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b7d4ebb638024f82e16cd16c5187b12022cf6d12c67f3f86eeb7904e3ff33368"} Dec 13 07:08:34 crc kubenswrapper[4733]: I1213 07:08:34.762639 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a2d0fa73f08b153ee5601dfc5637cc631eb324590221310ebc4c295d1ce3e7f2"} Dec 13 07:08:34 crc kubenswrapper[4733]: I1213 07:08:34.763611 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:34 crc kubenswrapper[4733]: I1213 07:08:34.763871 4733 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="17831d74-d1c0-4b8e-aa94-83f240384afb" Dec 13 07:08:34 crc kubenswrapper[4733]: I1213 07:08:34.763912 4733 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="17831d74-d1c0-4b8e-aa94-83f240384afb" Dec 13 07:08:35 crc kubenswrapper[4733]: I1213 07:08:35.615842 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:08:35 crc kubenswrapper[4733]: I1213 07:08:35.620195 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:08:36 crc kubenswrapper[4733]: I1213 07:08:36.618106 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:36 crc kubenswrapper[4733]: I1213 07:08:36.618169 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:36 crc kubenswrapper[4733]: I1213 07:08:36.627724 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:39 crc kubenswrapper[4733]: I1213 07:08:39.773906 4733 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:39 crc kubenswrapper[4733]: I1213 07:08:39.805386 4733 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="17831d74-d1c0-4b8e-aa94-83f240384afb" Dec 13 07:08:39 crc kubenswrapper[4733]: I1213 07:08:39.805427 4733 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="17831d74-d1c0-4b8e-aa94-83f240384afb" Dec 13 07:08:39 crc kubenswrapper[4733]: I1213 07:08:39.809672 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:39 crc kubenswrapper[4733]: I1213 07:08:39.993563 4733 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="9b8db0e1-3ab1-44ca-9415-df6fe920358b" Dec 13 07:08:40 crc kubenswrapper[4733]: I1213 07:08:40.810883 4733 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="17831d74-d1c0-4b8e-aa94-83f240384afb" Dec 13 07:08:40 crc kubenswrapper[4733]: I1213 07:08:40.811395 4733 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="17831d74-d1c0-4b8e-aa94-83f240384afb" Dec 13 07:08:40 crc kubenswrapper[4733]: I1213 07:08:40.815831 4733 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="9b8db0e1-3ab1-44ca-9415-df6fe920358b" Dec 13 07:08:42 crc kubenswrapper[4733]: I1213 07:08:42.877605 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 13 07:08:49 crc kubenswrapper[4733]: I1213 07:08:49.116764 4733 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns"/"dns-default-metrics-tls" Dec 13 07:08:49 crc kubenswrapper[4733]: I1213 07:08:49.131872 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 13 07:08:49 crc kubenswrapper[4733]: I1213 07:08:49.205531 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 13 07:08:49 crc kubenswrapper[4733]: I1213 07:08:49.453961 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 13 07:08:49 crc kubenswrapper[4733]: I1213 07:08:49.539408 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 13 07:08:49 crc kubenswrapper[4733]: I1213 07:08:49.817246 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 13 07:08:50 crc kubenswrapper[4733]: I1213 07:08:50.025523 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 13 07:08:50 crc kubenswrapper[4733]: I1213 07:08:50.057474 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 13 07:08:50 crc kubenswrapper[4733]: I1213 07:08:50.281384 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 13 07:08:50 crc kubenswrapper[4733]: I1213 07:08:50.607020 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 13 07:08:50 crc kubenswrapper[4733]: I1213 07:08:50.716916 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 13 07:08:50 crc kubenswrapper[4733]: I1213 07:08:50.798833 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 13 07:08:50 crc kubenswrapper[4733]: I1213 07:08:50.929617 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 13 07:08:51 crc kubenswrapper[4733]: I1213 07:08:51.275129 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 13 07:08:51 crc kubenswrapper[4733]: I1213 07:08:51.418644 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 13 07:08:51 crc kubenswrapper[4733]: I1213 07:08:51.526908 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 13 07:08:51 crc kubenswrapper[4733]: I1213 07:08:51.767010 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 13 07:08:51 crc kubenswrapper[4733]: I1213 07:08:51.977067 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.082066 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.091677 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 13 07:08:52 crc 
kubenswrapper[4733]: I1213 07:08:52.127434 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.194114 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.227058 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.367407 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.415651 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.491410 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.498768 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.510346 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.550555 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.788367 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.791686 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.835335 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 13 07:08:52 crc kubenswrapper[4733]: I1213 07:08:52.982673 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.006197 4733 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.058567 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.119264 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.204083 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.208799 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.463577 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 13 07:08:53 
crc kubenswrapper[4733]: I1213 07:08:53.486666 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.507912 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.601385 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.675151 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.718094 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.771855 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.784007 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.786009 4733 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.797247 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.811310 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.885572 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 07:08:53 crc kubenswrapper[4733]: I1213 07:08:53.951367 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.066480 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.121740 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.266370 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.344739 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.399758 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.473508 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.488527 4733 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console-operator"/"console-operator-config" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.680247 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.684385 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.806080 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.856467 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.882354 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.965967 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 13 07:08:54 crc kubenswrapper[4733]: I1213 07:08:54.969386 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 13 07:08:55 crc kubenswrapper[4733]: I1213 07:08:55.047070 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 13 07:08:55 crc kubenswrapper[4733]: I1213 07:08:55.095878 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 13 07:08:55 crc kubenswrapper[4733]: I1213 07:08:55.453684 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 13 07:08:55 crc kubenswrapper[4733]: I1213 07:08:55.531864 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 13 07:08:55 crc kubenswrapper[4733]: I1213 07:08:55.612150 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 13 07:08:55 crc kubenswrapper[4733]: I1213 07:08:55.766913 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 13 07:08:55 crc kubenswrapper[4733]: I1213 07:08:55.806936 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 13 07:08:55 crc kubenswrapper[4733]: I1213 07:08:55.807016 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.050434 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.054920 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.105199 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 13 07:08:56 
crc kubenswrapper[4733]: I1213 07:08:56.161075 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.193038 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.226895 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.239375 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.257618 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.275050 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.294034 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.296427 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.353589 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.360341 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.401139 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.531962 4733 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.532921 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=38.532900625 podStartE2EDuration="38.532900625s" podCreationTimestamp="2025-12-13 07:08:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:08:39.80241293 +0000 UTC m=+264.413643413" watchObservedRunningTime="2025-12-13 07:08:56.532900625 +0000 UTC m=+281.144131108" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.537421 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.537484 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.541637 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.566902 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=17.566867833 podStartE2EDuration="17.566867833s" 
podCreationTimestamp="2025-12-13 07:08:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:08:56.56156086 +0000 UTC m=+281.172791363" watchObservedRunningTime="2025-12-13 07:08:56.566867833 +0000 UTC m=+281.178098316" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.597679 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.612925 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.740254 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.742926 4733 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.762198 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.989085 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.996109 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 13 07:08:56 crc kubenswrapper[4733]: I1213 07:08:56.996343 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.104519 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.114403 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.135076 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.180239 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.186747 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.239397 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.373711 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.547372 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.565399 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 
07:08:57.670700 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.675274 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.741414 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.768095 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.819747 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.865146 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.892334 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 13 07:08:57 crc kubenswrapper[4733]: I1213 07:08:57.949696 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.058521 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.141379 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.194692 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.235121 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.239298 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.285437 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.421692 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.421799 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.455700 4733 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.456524 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.493478 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 13 
07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.532617 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.618636 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xqvh8"] Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.631646 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.731695 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.801095 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.918148 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.943674 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 13 07:08:58 crc kubenswrapper[4733]: I1213 07:08:58.968827 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.006991 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.039542 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.054346 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.058338 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.090518 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.153095 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.169437 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.187953 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.212554 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.272441 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.367669 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.408632 4733 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.470665 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.472796 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.496019 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.536326 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.540938 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.592851 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.679255 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.695266 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.703185 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.708134 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.729407 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.740920 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.746348 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.842736 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.928622 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 13 07:08:59 crc kubenswrapper[4733]: I1213 07:08:59.986493 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.151359 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.242269 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 
07:09:00.359487 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.415593 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.437441 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.473513 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.681747 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.694480 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.717370 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.770060 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.838269 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.868305 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 13 07:09:00 crc kubenswrapper[4733]: I1213 07:09:00.969594 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.079108 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.168912 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.263023 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.265298 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.456765 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.488521 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.489193 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.504303 4733 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"proxy-tls" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.531906 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.538787 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.573676 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.580704 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.595257 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.603129 4733 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.612510 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.614896 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.638457 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.660041 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.699641 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.702430 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.810458 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.868448 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.871728 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.936840 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 13 07:09:01 crc kubenswrapper[4733]: I1213 07:09:01.959106 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.003610 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.022472 4733 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.043052 4733 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.043410 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b" gracePeriod=5 Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.049113 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.064533 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.069585 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.197143 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.264466 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.269471 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.277285 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.353084 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.531438 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.626101 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.843101 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 13 07:09:02 crc kubenswrapper[4733]: I1213 07:09:02.946578 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.012202 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.109563 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.236992 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.297013 4733 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.331722 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.333762 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.347966 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.368228 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.450036 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.501802 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.507104 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.519781 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.631579 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.669548 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.899297 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.909623 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 13 07:09:03 crc kubenswrapper[4733]: I1213 07:09:03.937249 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.051527 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.107654 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.162791 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.222364 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.286049 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.304717 4733 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.352407 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.439285 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.495088 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.553359 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.696090 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 13 07:09:04 crc kubenswrapper[4733]: I1213 07:09:04.913026 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 13 07:09:05 crc kubenswrapper[4733]: I1213 07:09:05.011926 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 13 07:09:05 crc kubenswrapper[4733]: I1213 07:09:05.146285 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 13 07:09:05 crc kubenswrapper[4733]: I1213 07:09:05.248307 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 13 07:09:05 crc kubenswrapper[4733]: I1213 07:09:05.282720 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 13 07:09:05 crc kubenswrapper[4733]: I1213 07:09:05.284787 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 13 07:09:05 crc kubenswrapper[4733]: I1213 07:09:05.385243 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 13 07:09:05 crc kubenswrapper[4733]: I1213 07:09:05.624056 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 13 07:09:05 crc kubenswrapper[4733]: I1213 07:09:05.855692 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 13 07:09:05 crc kubenswrapper[4733]: I1213 07:09:05.920783 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 13 07:09:06 crc kubenswrapper[4733]: I1213 07:09:06.741441 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 13 07:09:07 crc kubenswrapper[4733]: I1213 07:09:07.903083 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 13 07:09:07 crc kubenswrapper[4733]: I1213 07:09:07.903461 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:09:07 crc kubenswrapper[4733]: I1213 07:09:07.974483 4733 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 13 07:09:07 crc kubenswrapper[4733]: I1213 07:09:07.974572 4733 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b" exitCode=137 Dec 13 07:09:07 crc kubenswrapper[4733]: I1213 07:09:07.974655 4733 scope.go:117] "RemoveContainer" containerID="5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b" Dec 13 07:09:07 crc kubenswrapper[4733]: I1213 07:09:07.974695 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 13 07:09:07 crc kubenswrapper[4733]: I1213 07:09:07.992460 4733 scope.go:117] "RemoveContainer" containerID="5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b" Dec 13 07:09:07 crc kubenswrapper[4733]: E1213 07:09:07.992854 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b\": container with ID starting with 5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b not found: ID does not exist" containerID="5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b" Dec 13 07:09:07 crc kubenswrapper[4733]: I1213 07:09:07.992893 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b"} err="failed to get container status \"5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b\": rpc error: code = NotFound desc = could not find container \"5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b\": container with ID starting with 5d7f9d6e0c4e8bcff5306fa1110648847c0d150c50dbb9b616f79ca90dc2f43b not found: ID does not exist" Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024050 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024137 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024191 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024288 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024306 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024349 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024386 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024378 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024495 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024649 4733 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024661 4733 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024671 4733 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.024680 4733 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.031971 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:09:08 crc kubenswrapper[4733]: I1213 07:09:08.125416 4733 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:09 crc kubenswrapper[4733]: I1213 07:09:09.619950 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 13 07:09:09 crc kubenswrapper[4733]: I1213 07:09:09.620609 4733 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 13 07:09:09 crc kubenswrapper[4733]: I1213 07:09:09.634470 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 07:09:09 crc kubenswrapper[4733]: I1213 07:09:09.634515 4733 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="25e11273-0f95-49b2-9bb7-f5f6269be44e" Dec 13 07:09:09 crc kubenswrapper[4733]: I1213 07:09:09.638446 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 13 07:09:09 crc kubenswrapper[4733]: I1213 07:09:09.638631 4733 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="25e11273-0f95-49b2-9bb7-f5f6269be44e" Dec 13 07:09:20 crc kubenswrapper[4733]: I1213 07:09:20.055046 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" event={"ID":"404840cf-dd63-4b5f-bde2-49af2c7ace66","Type":"ContainerDied","Data":"b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be"} Dec 13 07:09:20 crc kubenswrapper[4733]: I1213 07:09:20.056556 4733 scope.go:117] "RemoveContainer" 
containerID="b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be" Dec 13 07:09:20 crc kubenswrapper[4733]: I1213 07:09:20.055007 4733 generic.go:334] "Generic (PLEG): container finished" podID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerID="b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be" exitCode=0 Dec 13 07:09:20 crc kubenswrapper[4733]: I1213 07:09:20.187772 4733 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:09:20 crc kubenswrapper[4733]: I1213 07:09:20.187812 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:09:21 crc kubenswrapper[4733]: I1213 07:09:21.065384 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" event={"ID":"404840cf-dd63-4b5f-bde2-49af2c7ace66","Type":"ContainerStarted","Data":"748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896"} Dec 13 07:09:21 crc kubenswrapper[4733]: I1213 07:09:21.066000 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:09:21 crc kubenswrapper[4733]: I1213 07:09:21.068848 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:09:23 crc kubenswrapper[4733]: I1213 07:09:23.066273 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 13 07:09:23 crc kubenswrapper[4733]: I1213 07:09:23.648850 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" podUID="875adea7-a638-4064-a466-2ab4e37771e3" containerName="oauth-openshift" containerID="cri-o://3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36" gracePeriod=15 Dec 13 07:09:23 crc kubenswrapper[4733]: I1213 07:09:23.983799 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.010364 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf"] Dec 13 07:09:24 crc kubenswrapper[4733]: E1213 07:09:24.010594 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="875adea7-a638-4064-a466-2ab4e37771e3" containerName="oauth-openshift" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.010607 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="875adea7-a638-4064-a466-2ab4e37771e3" containerName="oauth-openshift" Dec 13 07:09:24 crc kubenswrapper[4733]: E1213 07:09:24.010625 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" containerName="installer" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.010633 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" containerName="installer" Dec 13 07:09:24 crc kubenswrapper[4733]: E1213 07:09:24.010646 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.010653 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.010743 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="875adea7-a638-4064-a466-2ab4e37771e3" containerName="oauth-openshift" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.010757 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6e7c6e2-3ab0-45cc-ba6b-ed5ef2ef9634" containerName="installer" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.010764 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.011101 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.024020 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf"] Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041062 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-ocp-branding-template\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041137 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-session\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041174 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2l48\" (UniqueName: \"kubernetes.io/projected/875adea7-a638-4064-a466-2ab4e37771e3-kube-api-access-x2l48\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041201 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-service-ca\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041283 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-error\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041308 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-idp-0-file-data\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041350 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-router-certs\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041390 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-login\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041446 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/875adea7-a638-4064-a466-2ab4e37771e3-audit-dir\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041471 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-cliconfig\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041502 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-serving-cert\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041526 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-trusted-ca-bundle\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041548 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-provider-selection\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041574 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-audit-policies\") pod \"875adea7-a638-4064-a466-2ab4e37771e3\" (UID: \"875adea7-a638-4064-a466-2ab4e37771e3\") " Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041617 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/875adea7-a638-4064-a466-2ab4e37771e3-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.041932 4733 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/875adea7-a638-4064-a466-2ab4e37771e3-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.042715 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.043188 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.045606 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.045698 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.048990 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.050279 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.051101 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.051609 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/875adea7-a638-4064-a466-2ab4e37771e3-kube-api-access-x2l48" (OuterVolumeSpecName: "kube-api-access-x2l48") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "kube-api-access-x2l48". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.051777 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.052098 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.052199 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.052735 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.059022 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "875adea7-a638-4064-a466-2ab4e37771e3" (UID: "875adea7-a638-4064-a466-2ab4e37771e3"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.084899 4733 generic.go:334] "Generic (PLEG): container finished" podID="875adea7-a638-4064-a466-2ab4e37771e3" containerID="3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36" exitCode=0 Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.084973 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" event={"ID":"875adea7-a638-4064-a466-2ab4e37771e3","Type":"ContainerDied","Data":"3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36"} Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.085001 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.085031 4733 scope.go:117] "RemoveContainer" containerID="3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.085015 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xqvh8" event={"ID":"875adea7-a638-4064-a466-2ab4e37771e3","Type":"ContainerDied","Data":"5a71a52fcb764578da42e4366cf283b9a55a05238724154bbd0fbbd2bf36b9d6"} Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.109010 4733 scope.go:117] "RemoveContainer" containerID="3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36" Dec 13 07:09:24 crc kubenswrapper[4733]: E1213 07:09:24.109939 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36\": container with ID starting with 3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36 not found: ID does not exist" containerID="3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.109988 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36"} err="failed to get container status \"3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36\": rpc error: code = NotFound desc = could not find container \"3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36\": container with ID starting with 3384b81d264bd1826a588e7a7d8a43ade37d7c2bd3a7944991383ac42a1d4e36 not found: ID does not exist" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.120940 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xqvh8"] Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.125313 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xqvh8"] Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.142585 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-session\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.142841 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.142876 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " 
pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.142907 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-audit-policies\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.142934 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-template-login\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.142977 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143023 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143047 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143075 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143103 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b0674923-723b-41da-97dd-38f118e2d212-audit-dir\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143133 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143179 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hnzh\" (UniqueName: \"kubernetes.io/projected/b0674923-723b-41da-97dd-38f118e2d212-kube-api-access-7hnzh\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143290 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-template-error\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143318 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143393 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143418 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143454 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143518 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143538 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143553 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143567 4733 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143584 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143601 4733 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143614 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143636 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143650 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2l48\" (UniqueName: \"kubernetes.io/projected/875adea7-a638-4064-a466-2ab4e37771e3-kube-api-access-x2l48\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.143662 4733 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/875adea7-a638-4064-a466-2ab4e37771e3-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245337 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-template-error\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245382 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245408 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-session\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245444 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245465 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245486 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-template-login\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245504 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-audit-policies\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245522 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245552 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245568 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245585 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245601 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/b0674923-723b-41da-97dd-38f118e2d212-audit-dir\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245617 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.245632 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hnzh\" (UniqueName: \"kubernetes.io/projected/b0674923-723b-41da-97dd-38f118e2d212-kube-api-access-7hnzh\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.246740 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b0674923-723b-41da-97dd-38f118e2d212-audit-dir\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.246904 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.246904 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.247405 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-audit-policies\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.248294 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.250617 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: 
\"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.250631 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.250775 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.251039 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-template-login\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.251473 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-session\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.251600 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.252633 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-template-error\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.253695 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b0674923-723b-41da-97dd-38f118e2d212-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.262509 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hnzh\" (UniqueName: \"kubernetes.io/projected/b0674923-723b-41da-97dd-38f118e2d212-kube-api-access-7hnzh\") pod \"oauth-openshift-7fb5d9b995-fg5lf\" (UID: \"b0674923-723b-41da-97dd-38f118e2d212\") " 
pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.334103 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:24 crc kubenswrapper[4733]: I1213 07:09:24.516371 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf"] Dec 13 07:09:24 crc kubenswrapper[4733]: W1213 07:09:24.519840 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0674923_723b_41da_97dd_38f118e2d212.slice/crio-bdf8e9fef1f2a3c61b3d4357fee4ddf6fab6b125f202e9489b284b5b31605b71 WatchSource:0}: Error finding container bdf8e9fef1f2a3c61b3d4357fee4ddf6fab6b125f202e9489b284b5b31605b71: Status 404 returned error can't find the container with id bdf8e9fef1f2a3c61b3d4357fee4ddf6fab6b125f202e9489b284b5b31605b71 Dec 13 07:09:25 crc kubenswrapper[4733]: I1213 07:09:25.094300 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" event={"ID":"b0674923-723b-41da-97dd-38f118e2d212","Type":"ContainerStarted","Data":"8f6a89eba2cde57ea581381058b42bbd3ad8d847471f1467b2e616e274d19931"} Dec 13 07:09:25 crc kubenswrapper[4733]: I1213 07:09:25.094643 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" event={"ID":"b0674923-723b-41da-97dd-38f118e2d212","Type":"ContainerStarted","Data":"bdf8e9fef1f2a3c61b3d4357fee4ddf6fab6b125f202e9489b284b5b31605b71"} Dec 13 07:09:25 crc kubenswrapper[4733]: I1213 07:09:25.094667 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:25 crc kubenswrapper[4733]: I1213 07:09:25.116784 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" podStartSLOduration=27.116766529 podStartE2EDuration="27.116766529s" podCreationTimestamp="2025-12-13 07:08:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:09:25.115444081 +0000 UTC m=+309.726674564" watchObservedRunningTime="2025-12-13 07:09:25.116766529 +0000 UTC m=+309.727997012" Dec 13 07:09:25 crc kubenswrapper[4733]: I1213 07:09:25.604277 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="875adea7-a638-4064-a466-2ab4e37771e3" path="/var/lib/kubelet/pods/875adea7-a638-4064-a466-2ab4e37771e3/volumes" Dec 13 07:09:25 crc kubenswrapper[4733]: I1213 07:09:25.604778 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7fb5d9b995-fg5lf" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.194708 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jwh8g"] Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.195358 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" podUID="e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a" containerName="controller-manager" containerID="cri-o://9192be2a57b35a1a1d0956bdea9c82894dae4ad05c1960714faa71554c216fd9" gracePeriod=30 Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.331997 4733 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn"] Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.332263 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" podUID="b17e913c-22ff-4166-bad2-fc4b1f9c6155" containerName="route-controller-manager" containerID="cri-o://702da900b132fc2037804eae62b99bda2c6a39b54e0a9a9765e848b5a68e3fbc" gracePeriod=30 Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.504481 4733 generic.go:334] "Generic (PLEG): container finished" podID="b17e913c-22ff-4166-bad2-fc4b1f9c6155" containerID="702da900b132fc2037804eae62b99bda2c6a39b54e0a9a9765e848b5a68e3fbc" exitCode=0 Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.504575 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" event={"ID":"b17e913c-22ff-4166-bad2-fc4b1f9c6155","Type":"ContainerDied","Data":"702da900b132fc2037804eae62b99bda2c6a39b54e0a9a9765e848b5a68e3fbc"} Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.506249 4733 generic.go:334] "Generic (PLEG): container finished" podID="e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a" containerID="9192be2a57b35a1a1d0956bdea9c82894dae4ad05c1960714faa71554c216fd9" exitCode=0 Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.506274 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" event={"ID":"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a","Type":"ContainerDied","Data":"9192be2a57b35a1a1d0956bdea9c82894dae4ad05c1960714faa71554c216fd9"} Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.616413 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.703798 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.735727 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-client-ca\") pod \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.735836 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-serving-cert\") pod \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.735867 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmqgv\" (UniqueName: \"kubernetes.io/projected/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-kube-api-access-pmqgv\") pod \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.735916 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-config\") pod \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.735938 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-proxy-ca-bundles\") pod \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\" (UID: \"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a\") " Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.737502 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a" (UID: "e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.737786 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-config" (OuterVolumeSpecName: "config") pod "e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a" (UID: "e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.739912 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-client-ca" (OuterVolumeSpecName: "client-ca") pod "e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a" (UID: "e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.757903 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a" (UID: "e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.758041 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-kube-api-access-pmqgv" (OuterVolumeSpecName: "kube-api-access-pmqgv") pod "e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a" (UID: "e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a"). InnerVolumeSpecName "kube-api-access-pmqgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.837416 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b17e913c-22ff-4166-bad2-fc4b1f9c6155-serving-cert\") pod \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.837500 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-config\") pod \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.837553 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bg57c\" (UniqueName: \"kubernetes.io/projected/b17e913c-22ff-4166-bad2-fc4b1f9c6155-kube-api-access-bg57c\") pod \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.837600 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-client-ca\") pod \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\" (UID: \"b17e913c-22ff-4166-bad2-fc4b1f9c6155\") " Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.837898 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.837914 4733 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.837928 4733 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.837939 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.837952 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmqgv\" (UniqueName: \"kubernetes.io/projected/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a-kube-api-access-pmqgv\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.838969 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-client-ca" (OuterVolumeSpecName: "client-ca") pod "b17e913c-22ff-4166-bad2-fc4b1f9c6155" (UID: 
"b17e913c-22ff-4166-bad2-fc4b1f9c6155"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.839115 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-config" (OuterVolumeSpecName: "config") pod "b17e913c-22ff-4166-bad2-fc4b1f9c6155" (UID: "b17e913c-22ff-4166-bad2-fc4b1f9c6155"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.841457 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b17e913c-22ff-4166-bad2-fc4b1f9c6155-kube-api-access-bg57c" (OuterVolumeSpecName: "kube-api-access-bg57c") pod "b17e913c-22ff-4166-bad2-fc4b1f9c6155" (UID: "b17e913c-22ff-4166-bad2-fc4b1f9c6155"). InnerVolumeSpecName "kube-api-access-bg57c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.842024 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b17e913c-22ff-4166-bad2-fc4b1f9c6155-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b17e913c-22ff-4166-bad2-fc4b1f9c6155" (UID: "b17e913c-22ff-4166-bad2-fc4b1f9c6155"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.939975 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bg57c\" (UniqueName: \"kubernetes.io/projected/b17e913c-22ff-4166-bad2-fc4b1f9c6155-kube-api-access-bg57c\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.940608 4733 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.940626 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b17e913c-22ff-4166-bad2-fc4b1f9c6155-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:30 crc kubenswrapper[4733]: I1213 07:09:30.940636 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b17e913c-22ff-4166-bad2-fc4b1f9c6155-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.514022 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" event={"ID":"e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a","Type":"ContainerDied","Data":"f56638a3bb8770d674a91c0fe986b7893695837162fcb29fc14ad6881429df6b"} Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.514049 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-jwh8g" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.514086 4733 scope.go:117] "RemoveContainer" containerID="9192be2a57b35a1a1d0956bdea9c82894dae4ad05c1960714faa71554c216fd9" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.516948 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" event={"ID":"b17e913c-22ff-4166-bad2-fc4b1f9c6155","Type":"ContainerDied","Data":"8cdf770903d30dbf9f8df156ed37bfa8a2068200bac2895cb850316a9fddea09"} Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.517040 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.532814 4733 scope.go:117] "RemoveContainer" containerID="702da900b132fc2037804eae62b99bda2c6a39b54e0a9a9765e848b5a68e3fbc" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.547532 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jwh8g"] Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.555606 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-jwh8g"] Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.559689 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn"] Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.562837 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-4frbn"] Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.600770 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b17e913c-22ff-4166-bad2-fc4b1f9c6155" path="/var/lib/kubelet/pods/b17e913c-22ff-4166-bad2-fc4b1f9c6155/volumes" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.601578 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a" path="/var/lib/kubelet/pods/e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a/volumes" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.944652 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9"] Dec 13 07:09:31 crc kubenswrapper[4733]: E1213 07:09:31.945113 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a" containerName="controller-manager" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.945152 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a" containerName="controller-manager" Dec 13 07:09:31 crc kubenswrapper[4733]: E1213 07:09:31.945189 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b17e913c-22ff-4166-bad2-fc4b1f9c6155" containerName="route-controller-manager" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.945249 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="b17e913c-22ff-4166-bad2-fc4b1f9c6155" containerName="route-controller-manager" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.945531 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1ccefcd-4ad4-4011-82f8-1236dc6e4b1a" containerName="controller-manager" Dec 13 07:09:31 
crc kubenswrapper[4733]: I1213 07:09:31.945581 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="b17e913c-22ff-4166-bad2-fc4b1f9c6155" containerName="route-controller-manager" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.946733 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.950752 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.950850 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.953604 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-lmxww"] Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.954135 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.954417 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.954453 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.954531 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.954579 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.958029 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.958882 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.958975 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.962909 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9"] Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.963942 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.963998 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.966701 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.971759 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-lmxww"] Dec 13 07:09:31 crc kubenswrapper[4733]: I1213 07:09:31.972552 4733 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.054639 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-client-ca\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.054741 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-config\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.054798 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/988e28ab-766f-4c46-bb1a-e0064f00df78-serving-cert\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.054896 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftvtk\" (UniqueName: \"kubernetes.io/projected/6f2d43d6-ad5e-4056-b89f-d07d5993c386-kube-api-access-ftvtk\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.054964 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-config\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.055048 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f2d43d6-ad5e-4056-b89f-d07d5993c386-serving-cert\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.055133 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-proxy-ca-bundles\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.055262 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxmjh\" (UniqueName: \"kubernetes.io/projected/988e28ab-766f-4c46-bb1a-e0064f00df78-kube-api-access-lxmjh\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: 
\"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.055361 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-client-ca\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.157245 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f2d43d6-ad5e-4056-b89f-d07d5993c386-serving-cert\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.157342 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-proxy-ca-bundles\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.157401 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxmjh\" (UniqueName: \"kubernetes.io/projected/988e28ab-766f-4c46-bb1a-e0064f00df78-kube-api-access-lxmjh\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.157429 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-client-ca\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.157488 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-client-ca\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.157534 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-config\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.157569 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/988e28ab-766f-4c46-bb1a-e0064f00df78-serving-cert\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc 
kubenswrapper[4733]: I1213 07:09:32.157606 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftvtk\" (UniqueName: \"kubernetes.io/projected/6f2d43d6-ad5e-4056-b89f-d07d5993c386-kube-api-access-ftvtk\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.157644 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-config\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.159457 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-client-ca\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.159583 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-config\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.160236 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-client-ca\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.160461 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-proxy-ca-bundles\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.160612 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-config\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.174766 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/988e28ab-766f-4c46-bb1a-e0064f00df78-serving-cert\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.182763 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6f2d43d6-ad5e-4056-b89f-d07d5993c386-serving-cert\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.186048 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftvtk\" (UniqueName: \"kubernetes.io/projected/6f2d43d6-ad5e-4056-b89f-d07d5993c386-kube-api-access-ftvtk\") pod \"controller-manager-5947bdc4b4-lmxww\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.186864 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxmjh\" (UniqueName: \"kubernetes.io/projected/988e28ab-766f-4c46-bb1a-e0064f00df78-kube-api-access-lxmjh\") pod \"route-controller-manager-8596447d46-2dfz9\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.267623 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.284267 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.458293 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9"] Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.496009 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-lmxww"] Dec 13 07:09:32 crc kubenswrapper[4733]: W1213 07:09:32.502406 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f2d43d6_ad5e_4056_b89f_d07d5993c386.slice/crio-b7fc2342c3bf029c375a9c970a77f7790f7218e64b19bd52e3fdef3d49d04f7e WatchSource:0}: Error finding container b7fc2342c3bf029c375a9c970a77f7790f7218e64b19bd52e3fdef3d49d04f7e: Status 404 returned error can't find the container with id b7fc2342c3bf029c375a9c970a77f7790f7218e64b19bd52e3fdef3d49d04f7e Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.524051 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" event={"ID":"988e28ab-766f-4c46-bb1a-e0064f00df78","Type":"ContainerStarted","Data":"ab88fac87b824e4ee861f1cdeab4aa5f0a4a09bf680a2f88d4b381ceca94ef38"} Dec 13 07:09:32 crc kubenswrapper[4733]: I1213 07:09:32.526248 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" event={"ID":"6f2d43d6-ad5e-4056-b89f-d07d5993c386","Type":"ContainerStarted","Data":"b7fc2342c3bf029c375a9c970a77f7790f7218e64b19bd52e3fdef3d49d04f7e"} Dec 13 07:09:33 crc kubenswrapper[4733]: I1213 07:09:33.411274 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 13 07:09:33 crc kubenswrapper[4733]: I1213 07:09:33.532674 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" 
event={"ID":"6f2d43d6-ad5e-4056-b89f-d07d5993c386","Type":"ContainerStarted","Data":"e0c14e4fa52c9d16b3fdb96089fb79e6d2cf1db5fadfb7e0f6d291684ff0cbf3"} Dec 13 07:09:33 crc kubenswrapper[4733]: I1213 07:09:33.534349 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:33 crc kubenswrapper[4733]: I1213 07:09:33.536198 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" event={"ID":"988e28ab-766f-4c46-bb1a-e0064f00df78","Type":"ContainerStarted","Data":"b264183c77454fc8b3d7bdabab64a9e628ef4ac7bb960a115eade1d946117e23"} Dec 13 07:09:33 crc kubenswrapper[4733]: I1213 07:09:33.536420 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:33 crc kubenswrapper[4733]: I1213 07:09:33.537688 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:33 crc kubenswrapper[4733]: I1213 07:09:33.541434 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:33 crc kubenswrapper[4733]: I1213 07:09:33.568204 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" podStartSLOduration=3.568188922 podStartE2EDuration="3.568188922s" podCreationTimestamp="2025-12-13 07:09:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:09:33.5667297 +0000 UTC m=+318.177960203" watchObservedRunningTime="2025-12-13 07:09:33.568188922 +0000 UTC m=+318.179419405" Dec 13 07:09:33 crc kubenswrapper[4733]: I1213 07:09:33.569158 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" podStartSLOduration=3.56915142 podStartE2EDuration="3.56915142s" podCreationTimestamp="2025-12-13 07:09:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:09:33.552778399 +0000 UTC m=+318.164008882" watchObservedRunningTime="2025-12-13 07:09:33.56915142 +0000 UTC m=+318.180381903" Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.629237 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jm9wb"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.630260 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jm9wb" podUID="11a45191-a277-4060-8dfc-4c577c6d2702" containerName="registry-server" containerID="cri-o://949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962" gracePeriod=30 Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.641575 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-w5cq4"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.641985 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-w5cq4" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerName="registry-server" 
containerID="cri-o://f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436" gracePeriod=30 Dec 13 07:09:37 crc kubenswrapper[4733]: E1213 07:09:37.650446 4733 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.651488 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7x9q8"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.651699 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7x9q8" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" containerName="registry-server" containerID="cri-o://8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7" gracePeriod=30 Dec 13 07:09:37 crc kubenswrapper[4733]: E1213 07:09:37.652648 4733 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 07:09:37 crc kubenswrapper[4733]: E1213 07:09:37.657714 4733 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 07:09:37 crc kubenswrapper[4733]: E1213 07:09:37.657753 4733 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-marketplace/certified-operators-w5cq4" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerName="registry-server" Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.668491 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gvt9c"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.668828 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gvt9c" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerName="registry-server" containerID="cri-o://6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0" gracePeriod=30 Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.670923 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5dwn"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.671144 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" podUID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerName="marketplace-operator" containerID="cri-o://748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896" gracePeriod=30 Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.678599 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9xkkt"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.678852 4733 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9xkkt" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" containerName="registry-server" containerID="cri-o://3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d" gracePeriod=30 Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.684379 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bjj7h"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.684658 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bjj7h" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" containerName="registry-server" containerID="cri-o://75706bce4a34e4d55d681f11e6cb58673ef10c61f36209f15a261517120bde55" gracePeriod=30 Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.696904 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-486jl"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.697882 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.701549 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wglbz"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.701809 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wglbz" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" containerName="registry-server" containerID="cri-o://eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a" gracePeriod=30 Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.717474 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wqqsg"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.717989 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wqqsg" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerName="registry-server" containerID="cri-o://7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f" gracePeriod=30 Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.722150 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-486jl"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.748825 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c77f7ee6-129c-4a48-b822-e89558943b49-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-486jl\" (UID: \"c77f7ee6-129c-4a48-b822-e89558943b49\") " pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.748905 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c77f7ee6-129c-4a48-b822-e89558943b49-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-486jl\" (UID: \"c77f7ee6-129c-4a48-b822-e89558943b49\") " pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.749029 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-ppq8t\" (UniqueName: \"kubernetes.io/projected/c77f7ee6-129c-4a48-b822-e89558943b49-kube-api-access-ppq8t\") pod \"marketplace-operator-79b997595-486jl\" (UID: \"c77f7ee6-129c-4a48-b822-e89558943b49\") " pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.852028 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppq8t\" (UniqueName: \"kubernetes.io/projected/c77f7ee6-129c-4a48-b822-e89558943b49-kube-api-access-ppq8t\") pod \"marketplace-operator-79b997595-486jl\" (UID: \"c77f7ee6-129c-4a48-b822-e89558943b49\") " pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.852086 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c77f7ee6-129c-4a48-b822-e89558943b49-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-486jl\" (UID: \"c77f7ee6-129c-4a48-b822-e89558943b49\") " pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.852175 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c77f7ee6-129c-4a48-b822-e89558943b49-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-486jl\" (UID: \"c77f7ee6-129c-4a48-b822-e89558943b49\") " pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.854014 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c77f7ee6-129c-4a48-b822-e89558943b49-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-486jl\" (UID: \"c77f7ee6-129c-4a48-b822-e89558943b49\") " pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:37 crc kubenswrapper[4733]: E1213 07:09:37.854676 4733 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0 is running failed: container process not found" containerID="6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 07:09:37 crc kubenswrapper[4733]: E1213 07:09:37.865723 4733 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0 is running failed: container process not found" containerID="6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.866961 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c77f7ee6-129c-4a48-b822-e89558943b49-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-486jl\" (UID: \"c77f7ee6-129c-4a48-b822-e89558943b49\") " pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:37 crc kubenswrapper[4733]: E1213 07:09:37.871401 4733 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not 
created or running: checking if PID of 6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0 is running failed: container process not found" containerID="6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0" cmd=["grpc_health_probe","-addr=:50051"] Dec 13 07:09:37 crc kubenswrapper[4733]: E1213 07:09:37.871470 4733 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-gvt9c" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerName="registry-server" Dec 13 07:09:37 crc kubenswrapper[4733]: I1213 07:09:37.872100 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppq8t\" (UniqueName: \"kubernetes.io/projected/c77f7ee6-129c-4a48-b822-e89558943b49-kube-api-access-ppq8t\") pod \"marketplace-operator-79b997595-486jl\" (UID: \"c77f7ee6-129c-4a48-b822-e89558943b49\") " pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.023680 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.381271 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.391480 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.393428 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.433424 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.434306 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.435040 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.437453 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.440329 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.459375 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-catalog-content\") pod \"f095c8b8-7de4-40cc-baf2-513801c2cd89\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.459667 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f57n2\" (UniqueName: \"kubernetes.io/projected/3bc7441a-bd67-4b1e-a799-684eb9e4463d-kube-api-access-f57n2\") pod \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.459783 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-catalog-content\") pod \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.459875 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brclb\" (UniqueName: \"kubernetes.io/projected/f095c8b8-7de4-40cc-baf2-513801c2cd89-kube-api-access-brclb\") pod \"f095c8b8-7de4-40cc-baf2-513801c2cd89\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.459970 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-utilities\") pod \"11a45191-a277-4060-8dfc-4c577c6d2702\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.460053 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-utilities\") pod \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\" (UID: \"3bc7441a-bd67-4b1e-a799-684eb9e4463d\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.460157 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-catalog-content\") pod \"11a45191-a277-4060-8dfc-4c577c6d2702\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.460311 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-utilities\") pod \"f095c8b8-7de4-40cc-baf2-513801c2cd89\" (UID: \"f095c8b8-7de4-40cc-baf2-513801c2cd89\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.460461 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wphw6\" (UniqueName: \"kubernetes.io/projected/11a45191-a277-4060-8dfc-4c577c6d2702-kube-api-access-wphw6\") pod \"11a45191-a277-4060-8dfc-4c577c6d2702\" (UID: \"11a45191-a277-4060-8dfc-4c577c6d2702\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.461754 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-utilities" (OuterVolumeSpecName: "utilities") pod 
"11a45191-a277-4060-8dfc-4c577c6d2702" (UID: "11a45191-a277-4060-8dfc-4c577c6d2702"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.462799 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-utilities" (OuterVolumeSpecName: "utilities") pod "3bc7441a-bd67-4b1e-a799-684eb9e4463d" (UID: "3bc7441a-bd67-4b1e-a799-684eb9e4463d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.463746 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-utilities" (OuterVolumeSpecName: "utilities") pod "f095c8b8-7de4-40cc-baf2-513801c2cd89" (UID: "f095c8b8-7de4-40cc-baf2-513801c2cd89"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.464802 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bc7441a-bd67-4b1e-a799-684eb9e4463d-kube-api-access-f57n2" (OuterVolumeSpecName: "kube-api-access-f57n2") pod "3bc7441a-bd67-4b1e-a799-684eb9e4463d" (UID: "3bc7441a-bd67-4b1e-a799-684eb9e4463d"). InnerVolumeSpecName "kube-api-access-f57n2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.465344 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11a45191-a277-4060-8dfc-4c577c6d2702-kube-api-access-wphw6" (OuterVolumeSpecName: "kube-api-access-wphw6") pod "11a45191-a277-4060-8dfc-4c577c6d2702" (UID: "11a45191-a277-4060-8dfc-4c577c6d2702"). InnerVolumeSpecName "kube-api-access-wphw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.467347 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f095c8b8-7de4-40cc-baf2-513801c2cd89-kube-api-access-brclb" (OuterVolumeSpecName: "kube-api-access-brclb") pod "f095c8b8-7de4-40cc-baf2-513801c2cd89" (UID: "f095c8b8-7de4-40cc-baf2-513801c2cd89"). InnerVolumeSpecName "kube-api-access-brclb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.524371 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3bc7441a-bd67-4b1e-a799-684eb9e4463d" (UID: "3bc7441a-bd67-4b1e-a799-684eb9e4463d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.527810 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11a45191-a277-4060-8dfc-4c577c6d2702" (UID: "11a45191-a277-4060-8dfc-4c577c6d2702"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.537060 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f095c8b8-7de4-40cc-baf2-513801c2cd89" (UID: "f095c8b8-7de4-40cc-baf2-513801c2cd89"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561407 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t86hj\" (UniqueName: \"kubernetes.io/projected/1f0dfb11-f02d-4007-8275-c92f802f9010-kube-api-access-t86hj\") pod \"1f0dfb11-f02d-4007-8275-c92f802f9010\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561450 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khb94\" (UniqueName: \"kubernetes.io/projected/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-kube-api-access-khb94\") pod \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561489 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-catalog-content\") pod \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561517 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-catalog-content\") pod \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561538 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-utilities\") pod \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561564 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-operator-metrics\") pod \"404840cf-dd63-4b5f-bde2-49af2c7ace66\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561594 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-catalog-content\") pod \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561666 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-trusted-ca\") pod \"404840cf-dd63-4b5f-bde2-49af2c7ace66\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561693 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-utilities\") pod \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\" (UID: \"ce65638e-1ce3-4625-ad9b-2dd7af81efc4\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561736 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6q8p\" (UniqueName: \"kubernetes.io/projected/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-kube-api-access-b6q8p\") pod \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561816 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vcm8\" (UniqueName: \"kubernetes.io/projected/404840cf-dd63-4b5f-bde2-49af2c7ace66-kube-api-access-9vcm8\") pod \"404840cf-dd63-4b5f-bde2-49af2c7ace66\" (UID: \"404840cf-dd63-4b5f-bde2-49af2c7ace66\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.561897 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-catalog-content\") pod \"1f0dfb11-f02d-4007-8275-c92f802f9010\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562003 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqrz2\" (UniqueName: \"kubernetes.io/projected/3851778b-0b71-4b8f-b562-0d8f9bb86e75-kube-api-access-wqrz2\") pod \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\" (UID: \"3851778b-0b71-4b8f-b562-0d8f9bb86e75\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562059 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-utilities\") pod \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\" (UID: \"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562090 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-utilities\") pod \"1f0dfb11-f02d-4007-8275-c92f802f9010\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562492 4733 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562534 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562549 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wphw6\" (UniqueName: \"kubernetes.io/projected/11a45191-a277-4060-8dfc-4c577c6d2702-kube-api-access-wphw6\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562565 4733 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f095c8b8-7de4-40cc-baf2-513801c2cd89-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562579 4733 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-f57n2\" (UniqueName: \"kubernetes.io/projected/3bc7441a-bd67-4b1e-a799-684eb9e4463d-kube-api-access-f57n2\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562618 4733 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562630 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brclb\" (UniqueName: \"kubernetes.io/projected/f095c8b8-7de4-40cc-baf2-513801c2cd89-kube-api-access-brclb\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562641 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11a45191-a277-4060-8dfc-4c577c6d2702-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.562653 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bc7441a-bd67-4b1e-a799-684eb9e4463d-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.563520 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-utilities" (OuterVolumeSpecName: "utilities") pod "1f0dfb11-f02d-4007-8275-c92f802f9010" (UID: "1f0dfb11-f02d-4007-8275-c92f802f9010"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.564005 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-utilities" (OuterVolumeSpecName: "utilities") pod "ce65638e-1ce3-4625-ad9b-2dd7af81efc4" (UID: "ce65638e-1ce3-4625-ad9b-2dd7af81efc4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.566392 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-utilities" (OuterVolumeSpecName: "utilities") pod "13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" (UID: "13d8b178-e08a-43ec-a5ed-5a3c8816f7f7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.566423 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-utilities" (OuterVolumeSpecName: "utilities") pod "3851778b-0b71-4b8f-b562-0d8f9bb86e75" (UID: "3851778b-0b71-4b8f-b562-0d8f9bb86e75"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.567333 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "404840cf-dd63-4b5f-bde2-49af2c7ace66" (UID: "404840cf-dd63-4b5f-bde2-49af2c7ace66"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.578938 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f0dfb11-f02d-4007-8275-c92f802f9010-kube-api-access-t86hj" (OuterVolumeSpecName: "kube-api-access-t86hj") pod "1f0dfb11-f02d-4007-8275-c92f802f9010" (UID: "1f0dfb11-f02d-4007-8275-c92f802f9010"). InnerVolumeSpecName "kube-api-access-t86hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.581757 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3851778b-0b71-4b8f-b562-0d8f9bb86e75-kube-api-access-wqrz2" (OuterVolumeSpecName: "kube-api-access-wqrz2") pod "3851778b-0b71-4b8f-b562-0d8f9bb86e75" (UID: "3851778b-0b71-4b8f-b562-0d8f9bb86e75"). InnerVolumeSpecName "kube-api-access-wqrz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.589318 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-kube-api-access-b6q8p" (OuterVolumeSpecName: "kube-api-access-b6q8p") pod "13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" (UID: "13d8b178-e08a-43ec-a5ed-5a3c8816f7f7"). InnerVolumeSpecName "kube-api-access-b6q8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.590030 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-kube-api-access-khb94" (OuterVolumeSpecName: "kube-api-access-khb94") pod "ce65638e-1ce3-4625-ad9b-2dd7af81efc4" (UID: "ce65638e-1ce3-4625-ad9b-2dd7af81efc4"). InnerVolumeSpecName "kube-api-access-khb94". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.590406 4733 generic.go:334] "Generic (PLEG): container finished" podID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerID="748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896" exitCode=0 Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.590527 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" event={"ID":"404840cf-dd63-4b5f-bde2-49af2c7ace66","Type":"ContainerDied","Data":"748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.590598 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" event={"ID":"404840cf-dd63-4b5f-bde2-49af2c7ace66","Type":"ContainerDied","Data":"5f32a44cb36dde1a8670357adaf36b7ed5fd680e2cdd6f09f6a4b237dd56b8f4"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.590644 4733 scope.go:117] "RemoveContainer" containerID="748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.590924 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k5dwn" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.596427 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/404840cf-dd63-4b5f-bde2-49af2c7ace66-kube-api-access-9vcm8" (OuterVolumeSpecName: "kube-api-access-9vcm8") pod "404840cf-dd63-4b5f-bde2-49af2c7ace66" (UID: "404840cf-dd63-4b5f-bde2-49af2c7ace66"). InnerVolumeSpecName "kube-api-access-9vcm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.598513 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "404840cf-dd63-4b5f-bde2-49af2c7ace66" (UID: "404840cf-dd63-4b5f-bde2-49af2c7ace66"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.604131 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jm9wb" event={"ID":"11a45191-a277-4060-8dfc-4c577c6d2702","Type":"ContainerDied","Data":"949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.604189 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jm9wb" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.604063 4733 generic.go:334] "Generic (PLEG): container finished" podID="11a45191-a277-4060-8dfc-4c577c6d2702" containerID="949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962" exitCode=0 Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.604364 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jm9wb" event={"ID":"11a45191-a277-4060-8dfc-4c577c6d2702","Type":"ContainerDied","Data":"8ffab240b6f7f47417ce1d1864f8de114cec46b9ff8c5350c687d066fc4654d5"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.609771 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce65638e-1ce3-4625-ad9b-2dd7af81efc4" (UID: "ce65638e-1ce3-4625-ad9b-2dd7af81efc4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.624865 4733 generic.go:334] "Generic (PLEG): container finished" podID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" containerID="eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a" exitCode=0 Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.625106 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wglbz" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.625268 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wglbz" event={"ID":"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7","Type":"ContainerDied","Data":"eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.625300 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wglbz" event={"ID":"13d8b178-e08a-43ec-a5ed-5a3c8816f7f7","Type":"ContainerDied","Data":"d15d67abaf3226e947ca9b7c6ea993db89714ce449c76d8c9e3600df6832f962"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.648804 4733 scope.go:117] "RemoveContainer" containerID="b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.649308 4733 generic.go:334] "Generic (PLEG): container finished" podID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerID="f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436" exitCode=0 Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.649369 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5cq4" event={"ID":"f095c8b8-7de4-40cc-baf2-513801c2cd89","Type":"ContainerDied","Data":"f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.649403 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w5cq4" event={"ID":"f095c8b8-7de4-40cc-baf2-513801c2cd89","Type":"ContainerDied","Data":"15d116b3249873b88c3d49a117a288812ceab778e0fc949e54b63bd829ad8d09"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.649496 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w5cq4" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.656587 4733 generic.go:334] "Generic (PLEG): container finished" podID="93072069-7e83-44f8-a145-e4d481eeb3de" containerID="75706bce4a34e4d55d681f11e6cb58673ef10c61f36209f15a261517120bde55" exitCode=0 Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.656759 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjj7h" event={"ID":"93072069-7e83-44f8-a145-e4d481eeb3de","Type":"ContainerDied","Data":"75706bce4a34e4d55d681f11e6cb58673ef10c61f36209f15a261517120bde55"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.660302 4733 generic.go:334] "Generic (PLEG): container finished" podID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerID="6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0" exitCode=0 Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.660388 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvt9c" event={"ID":"3bc7441a-bd67-4b1e-a799-684eb9e4463d","Type":"ContainerDied","Data":"6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.660416 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvt9c" event={"ID":"3bc7441a-bd67-4b1e-a799-684eb9e4463d","Type":"ContainerDied","Data":"16178ef2fea67566a1f1d3be4c2fd25926182b01f7b3f06789f7f25f0ac5213e"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.660515 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jm9wb"] Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.662897 4733 generic.go:334] "Generic (PLEG): container finished" podID="1f0dfb11-f02d-4007-8275-c92f802f9010" containerID="8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7" exitCode=0 Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.662973 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x9q8" event={"ID":"1f0dfb11-f02d-4007-8275-c92f802f9010","Type":"ContainerDied","Data":"8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.663001 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x9q8" event={"ID":"1f0dfb11-f02d-4007-8275-c92f802f9010","Type":"ContainerDied","Data":"dccd6753a1a470520e0390a156180a2054811a89cb0d9ef73d707311d9a1d325"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.663079 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7x9q8" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.664119 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f0dfb11-f02d-4007-8275-c92f802f9010" (UID: "1f0dfb11-f02d-4007-8275-c92f802f9010"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.664329 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-catalog-content\") pod \"1f0dfb11-f02d-4007-8275-c92f802f9010\" (UID: \"1f0dfb11-f02d-4007-8275-c92f802f9010\") " Dec 13 07:09:38 crc kubenswrapper[4733]: W1213 07:09:38.664558 4733 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/1f0dfb11-f02d-4007-8275-c92f802f9010/volumes/kubernetes.io~empty-dir/catalog-content Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.664573 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f0dfb11-f02d-4007-8275-c92f802f9010" (UID: "1f0dfb11-f02d-4007-8275-c92f802f9010"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.664908 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t86hj\" (UniqueName: \"kubernetes.io/projected/1f0dfb11-f02d-4007-8275-c92f802f9010-kube-api-access-t86hj\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.664956 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khb94\" (UniqueName: \"kubernetes.io/projected/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-kube-api-access-khb94\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665002 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665016 4733 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665026 4733 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665035 4733 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/404840cf-dd63-4b5f-bde2-49af2c7ace66-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665044 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce65638e-1ce3-4625-ad9b-2dd7af81efc4-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665065 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6q8p\" (UniqueName: \"kubernetes.io/projected/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-kube-api-access-b6q8p\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665074 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vcm8\" (UniqueName: 
\"kubernetes.io/projected/404840cf-dd63-4b5f-bde2-49af2c7ace66-kube-api-access-9vcm8\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665085 4733 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665094 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqrz2\" (UniqueName: \"kubernetes.io/projected/3851778b-0b71-4b8f-b562-0d8f9bb86e75-kube-api-access-wqrz2\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665103 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665112 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f0dfb11-f02d-4007-8275-c92f802f9010-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.665588 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gvt9c" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.673696 4733 generic.go:334] "Generic (PLEG): container finished" podID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" containerID="3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d" exitCode=0 Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.673789 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xkkt" event={"ID":"ce65638e-1ce3-4625-ad9b-2dd7af81efc4","Type":"ContainerDied","Data":"3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.673821 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9xkkt" event={"ID":"ce65638e-1ce3-4625-ad9b-2dd7af81efc4","Type":"ContainerDied","Data":"085de72efb2700882a7dc030c16518c4725ca833a35317ca95bc8842f2700490"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.673961 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9xkkt" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.674732 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jm9wb"] Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.681128 4733 scope.go:117] "RemoveContainer" containerID="748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896" Dec 13 07:09:38 crc kubenswrapper[4733]: E1213 07:09:38.682594 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896\": container with ID starting with 748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896 not found: ID does not exist" containerID="748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.682673 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896"} err="failed to get container status \"748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896\": rpc error: code = NotFound desc = could not find container \"748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896\": container with ID starting with 748d58600664c54fc13967ad332637cfcbac7db40ebed1179ac2e8d012deb896 not found: ID does not exist" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.682731 4733 scope.go:117] "RemoveContainer" containerID="b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.683160 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:09:38 crc kubenswrapper[4733]: E1213 07:09:38.683187 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be\": container with ID starting with b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be not found: ID does not exist" containerID="b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.683232 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be"} err="failed to get container status \"b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be\": rpc error: code = NotFound desc = could not find container \"b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be\": container with ID starting with b9a3757d233f21adf0fd612f1bc7f18d45ea7cc0c62e023430624d42ac5036be not found: ID does not exist" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.683257 4733 scope.go:117] "RemoveContainer" containerID="949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.685168 4733 generic.go:334] "Generic (PLEG): container finished" podID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerID="7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f" exitCode=0 Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.685247 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wqqsg" event={"ID":"3851778b-0b71-4b8f-b562-0d8f9bb86e75","Type":"ContainerDied","Data":"7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.685283 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wqqsg" event={"ID":"3851778b-0b71-4b8f-b562-0d8f9bb86e75","Type":"ContainerDied","Data":"8d161174b2c7859e3bfa1c6662977c5a4122bc5bcbd739dbfa38e4846e898ec1"} Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.685461 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wqqsg" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.703224 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-w5cq4"] Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.709550 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-w5cq4"] Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.733655 4733 scope.go:117] "RemoveContainer" containerID="6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.754999 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9xkkt"] Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.761344 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9xkkt"] Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.768442 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlmh7\" (UniqueName: \"kubernetes.io/projected/93072069-7e83-44f8-a145-e4d481eeb3de-kube-api-access-zlmh7\") pod \"93072069-7e83-44f8-a145-e4d481eeb3de\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.768511 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-utilities\") pod \"93072069-7e83-44f8-a145-e4d481eeb3de\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.768601 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-catalog-content\") pod \"93072069-7e83-44f8-a145-e4d481eeb3de\" (UID: \"93072069-7e83-44f8-a145-e4d481eeb3de\") " Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.769674 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-utilities" (OuterVolumeSpecName: "utilities") pod "93072069-7e83-44f8-a145-e4d481eeb3de" (UID: "93072069-7e83-44f8-a145-e4d481eeb3de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.774549 4733 scope.go:117] "RemoveContainer" containerID="b504c2a27f416af9922656db91b4a2a3d51c0d2cb43fddd1b3e016f3333cbc6f" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.774653 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93072069-7e83-44f8-a145-e4d481eeb3de-kube-api-access-zlmh7" (OuterVolumeSpecName: "kube-api-access-zlmh7") pod "93072069-7e83-44f8-a145-e4d481eeb3de" (UID: "93072069-7e83-44f8-a145-e4d481eeb3de"). InnerVolumeSpecName "kube-api-access-zlmh7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.793801 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gvt9c"] Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.795943 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "93072069-7e83-44f8-a145-e4d481eeb3de" (UID: "93072069-7e83-44f8-a145-e4d481eeb3de"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.805049 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gvt9c"] Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.822162 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3851778b-0b71-4b8f-b562-0d8f9bb86e75" (UID: "3851778b-0b71-4b8f-b562-0d8f9bb86e75"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.852457 4733 scope.go:117] "RemoveContainer" containerID="949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962" Dec 13 07:09:38 crc kubenswrapper[4733]: E1213 07:09:38.854561 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962\": container with ID starting with 949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962 not found: ID does not exist" containerID="949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.854716 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962"} err="failed to get container status \"949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962\": rpc error: code = NotFound desc = could not find container \"949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962\": container with ID starting with 949b9f9fd0477a2e9cc4a0e93cabdb3929512f88af7c1768ede568c96ac90962 not found: ID does not exist" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.854928 4733 scope.go:117] "RemoveContainer" containerID="6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e" Dec 13 07:09:38 crc kubenswrapper[4733]: E1213 07:09:38.855561 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e\": container with ID starting with 6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e not found: ID does not exist" containerID="6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.855611 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e"} err="failed to get container status \"6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e\": rpc error: code = NotFound desc = could not 
find container \"6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e\": container with ID starting with 6115f12203e895a14fe1c287ae4c9ae4652928ea7c1c11bf84e6e6d33757be7e not found: ID does not exist" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.855631 4733 scope.go:117] "RemoveContainer" containerID="b504c2a27f416af9922656db91b4a2a3d51c0d2cb43fddd1b3e016f3333cbc6f" Dec 13 07:09:38 crc kubenswrapper[4733]: E1213 07:09:38.857178 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b504c2a27f416af9922656db91b4a2a3d51c0d2cb43fddd1b3e016f3333cbc6f\": container with ID starting with b504c2a27f416af9922656db91b4a2a3d51c0d2cb43fddd1b3e016f3333cbc6f not found: ID does not exist" containerID="b504c2a27f416af9922656db91b4a2a3d51c0d2cb43fddd1b3e016f3333cbc6f" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.857220 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b504c2a27f416af9922656db91b4a2a3d51c0d2cb43fddd1b3e016f3333cbc6f"} err="failed to get container status \"b504c2a27f416af9922656db91b4a2a3d51c0d2cb43fddd1b3e016f3333cbc6f\": rpc error: code = NotFound desc = could not find container \"b504c2a27f416af9922656db91b4a2a3d51c0d2cb43fddd1b3e016f3333cbc6f\": container with ID starting with b504c2a27f416af9922656db91b4a2a3d51c0d2cb43fddd1b3e016f3333cbc6f not found: ID does not exist" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.857249 4733 scope.go:117] "RemoveContainer" containerID="eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.863429 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" (UID: "13d8b178-e08a-43ec-a5ed-5a3c8816f7f7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.871109 4733 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-utilities\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.871840 4733 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93072069-7e83-44f8-a145-e4d481eeb3de-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.871913 4733 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3851778b-0b71-4b8f-b562-0d8f9bb86e75-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.871972 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlmh7\" (UniqueName: \"kubernetes.io/projected/93072069-7e83-44f8-a145-e4d481eeb3de-kube-api-access-zlmh7\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.872034 4733 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.875644 4733 scope.go:117] "RemoveContainer" containerID="0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.899014 4733 scope.go:117] "RemoveContainer" containerID="b25e0aeb5e6ebb9f8c2f44c494ad45d2d475fc9bb328547e3d7633428ec8d75c" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.940260 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-486jl"] Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.943482 4733 scope.go:117] "RemoveContainer" containerID="eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a" Dec 13 07:09:38 crc kubenswrapper[4733]: E1213 07:09:38.946924 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a\": container with ID starting with eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a not found: ID does not exist" containerID="eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.946968 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a"} err="failed to get container status \"eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a\": rpc error: code = NotFound desc = could not find container \"eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a\": container with ID starting with eb11af054122cdd04e56a6ac9ac18913f2f0475b8c43ceb782a473b68c46351a not found: ID does not exist" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.947000 4733 scope.go:117] "RemoveContainer" containerID="0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4" Dec 13 07:09:38 crc kubenswrapper[4733]: E1213 07:09:38.949153 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4\": container with ID starting with 0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4 not found: ID does not exist" containerID="0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.949197 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4"} err="failed to get container status \"0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4\": rpc error: code = NotFound desc = could not find container \"0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4\": container with ID starting with 0516a4690d640a478dd8177142d23a61d1240cb0ec67df681c28e676f66003b4 not found: ID does not exist" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.949241 4733 scope.go:117] "RemoveContainer" containerID="b25e0aeb5e6ebb9f8c2f44c494ad45d2d475fc9bb328547e3d7633428ec8d75c" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.950095 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5dwn"] Dec 13 07:09:38 crc kubenswrapper[4733]: E1213 07:09:38.951612 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b25e0aeb5e6ebb9f8c2f44c494ad45d2d475fc9bb328547e3d7633428ec8d75c\": container with ID starting with b25e0aeb5e6ebb9f8c2f44c494ad45d2d475fc9bb328547e3d7633428ec8d75c not found: ID does not exist" containerID="b25e0aeb5e6ebb9f8c2f44c494ad45d2d475fc9bb328547e3d7633428ec8d75c" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.951652 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b25e0aeb5e6ebb9f8c2f44c494ad45d2d475fc9bb328547e3d7633428ec8d75c"} err="failed to get container status \"b25e0aeb5e6ebb9f8c2f44c494ad45d2d475fc9bb328547e3d7633428ec8d75c\": rpc error: code = NotFound desc = could not find container \"b25e0aeb5e6ebb9f8c2f44c494ad45d2d475fc9bb328547e3d7633428ec8d75c\": container with ID starting with b25e0aeb5e6ebb9f8c2f44c494ad45d2d475fc9bb328547e3d7633428ec8d75c not found: ID does not exist" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.951684 4733 scope.go:117] "RemoveContainer" containerID="f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.956872 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5dwn"] Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.982373 4733 scope.go:117] "RemoveContainer" containerID="034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b" Dec 13 07:09:38 crc kubenswrapper[4733]: I1213 07:09:38.991419 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wglbz"] Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.001640 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wglbz"] Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.024403 4733 scope.go:117] "RemoveContainer" containerID="2ca5fc06a971e65e40210e14b73aff3c0156601a03f880b4fc51b131d213981e" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.036770 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7x9q8"] Dec 13 07:09:39 crc 
kubenswrapper[4733]: I1213 07:09:39.042693 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7x9q8"] Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.047109 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wqqsg"] Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.048103 4733 scope.go:117] "RemoveContainer" containerID="f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.048644 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436\": container with ID starting with f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436 not found: ID does not exist" containerID="f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.048682 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436"} err="failed to get container status \"f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436\": rpc error: code = NotFound desc = could not find container \"f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436\": container with ID starting with f809e5426f8c07eb6b532a1688128997fe922f681e945f34cf5d6c61f13e1436 not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.048713 4733 scope.go:117] "RemoveContainer" containerID="034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.049188 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b\": container with ID starting with 034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b not found: ID does not exist" containerID="034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.049286 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b"} err="failed to get container status \"034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b\": rpc error: code = NotFound desc = could not find container \"034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b\": container with ID starting with 034f492260d106e28dbea397a2d9961584377ac44f4621df2de21e0527f1962b not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.049309 4733 scope.go:117] "RemoveContainer" containerID="2ca5fc06a971e65e40210e14b73aff3c0156601a03f880b4fc51b131d213981e" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.050145 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ca5fc06a971e65e40210e14b73aff3c0156601a03f880b4fc51b131d213981e\": container with ID starting with 2ca5fc06a971e65e40210e14b73aff3c0156601a03f880b4fc51b131d213981e not found: ID does not exist" containerID="2ca5fc06a971e65e40210e14b73aff3c0156601a03f880b4fc51b131d213981e" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.050171 4733 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"2ca5fc06a971e65e40210e14b73aff3c0156601a03f880b4fc51b131d213981e"} err="failed to get container status \"2ca5fc06a971e65e40210e14b73aff3c0156601a03f880b4fc51b131d213981e\": rpc error: code = NotFound desc = could not find container \"2ca5fc06a971e65e40210e14b73aff3c0156601a03f880b4fc51b131d213981e\": container with ID starting with 2ca5fc06a971e65e40210e14b73aff3c0156601a03f880b4fc51b131d213981e not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.050187 4733 scope.go:117] "RemoveContainer" containerID="6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.052804 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wqqsg"] Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.066580 4733 scope.go:117] "RemoveContainer" containerID="15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.081436 4733 scope.go:117] "RemoveContainer" containerID="34f3670259f0c6e7aa1a697ee327cd41d363a5c8ee2347b772945872c9055d9f" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.103101 4733 scope.go:117] "RemoveContainer" containerID="6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.103533 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0\": container with ID starting with 6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0 not found: ID does not exist" containerID="6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.103570 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0"} err="failed to get container status \"6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0\": rpc error: code = NotFound desc = could not find container \"6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0\": container with ID starting with 6ad64e2b6313443e36f5e663b9aafd4fb0a75ff59706a1d11d99400cbdd137a0 not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.103659 4733 scope.go:117] "RemoveContainer" containerID="15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.103938 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7\": container with ID starting with 15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7 not found: ID does not exist" containerID="15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.103965 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7"} err="failed to get container status \"15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7\": rpc error: code = NotFound desc = could not find container \"15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7\": container with ID 
starting with 15ffeb9aa2e4888c16f98c2120c57ff19caaeee996dd66ff182e398d1cf871e7 not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.103983 4733 scope.go:117] "RemoveContainer" containerID="34f3670259f0c6e7aa1a697ee327cd41d363a5c8ee2347b772945872c9055d9f" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.104179 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34f3670259f0c6e7aa1a697ee327cd41d363a5c8ee2347b772945872c9055d9f\": container with ID starting with 34f3670259f0c6e7aa1a697ee327cd41d363a5c8ee2347b772945872c9055d9f not found: ID does not exist" containerID="34f3670259f0c6e7aa1a697ee327cd41d363a5c8ee2347b772945872c9055d9f" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.104199 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34f3670259f0c6e7aa1a697ee327cd41d363a5c8ee2347b772945872c9055d9f"} err="failed to get container status \"34f3670259f0c6e7aa1a697ee327cd41d363a5c8ee2347b772945872c9055d9f\": rpc error: code = NotFound desc = could not find container \"34f3670259f0c6e7aa1a697ee327cd41d363a5c8ee2347b772945872c9055d9f\": container with ID starting with 34f3670259f0c6e7aa1a697ee327cd41d363a5c8ee2347b772945872c9055d9f not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.104235 4733 scope.go:117] "RemoveContainer" containerID="8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.122770 4733 scope.go:117] "RemoveContainer" containerID="8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.149778 4733 scope.go:117] "RemoveContainer" containerID="aad10f8986a48fbe7987a89d3fbe1e557736a1cd460f77b083734af000c6b97a" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.163495 4733 scope.go:117] "RemoveContainer" containerID="8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.164078 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7\": container with ID starting with 8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7 not found: ID does not exist" containerID="8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.164185 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7"} err="failed to get container status \"8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7\": rpc error: code = NotFound desc = could not find container \"8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7\": container with ID starting with 8c2beb477527953810238309a3bff29c6959d9879dfe255341f2a57bc8398ad7 not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.164332 4733 scope.go:117] "RemoveContainer" containerID="8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.164875 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b\": container with ID starting with 8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b not found: ID does not exist" containerID="8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.164906 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b"} err="failed to get container status \"8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b\": rpc error: code = NotFound desc = could not find container \"8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b\": container with ID starting with 8e43a8fb823095d67d4faae6e3f18d32b5c79b8ffd9a5c1fce6f08e7712cf04b not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.164930 4733 scope.go:117] "RemoveContainer" containerID="aad10f8986a48fbe7987a89d3fbe1e557736a1cd460f77b083734af000c6b97a" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.165177 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aad10f8986a48fbe7987a89d3fbe1e557736a1cd460f77b083734af000c6b97a\": container with ID starting with aad10f8986a48fbe7987a89d3fbe1e557736a1cd460f77b083734af000c6b97a not found: ID does not exist" containerID="aad10f8986a48fbe7987a89d3fbe1e557736a1cd460f77b083734af000c6b97a" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.165199 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aad10f8986a48fbe7987a89d3fbe1e557736a1cd460f77b083734af000c6b97a"} err="failed to get container status \"aad10f8986a48fbe7987a89d3fbe1e557736a1cd460f77b083734af000c6b97a\": rpc error: code = NotFound desc = could not find container \"aad10f8986a48fbe7987a89d3fbe1e557736a1cd460f77b083734af000c6b97a\": container with ID starting with aad10f8986a48fbe7987a89d3fbe1e557736a1cd460f77b083734af000c6b97a not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.165230 4733 scope.go:117] "RemoveContainer" containerID="3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.176241 4733 scope.go:117] "RemoveContainer" containerID="4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.189047 4733 scope.go:117] "RemoveContainer" containerID="c4c1dc62247b1828e9f21a233ae28ccd95944940fd05f0524a54ae7d0e8466c2" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.209140 4733 scope.go:117] "RemoveContainer" containerID="3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.209742 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d\": container with ID starting with 3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d not found: ID does not exist" containerID="3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.209780 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d"} err="failed to get container status 
\"3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d\": rpc error: code = NotFound desc = could not find container \"3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d\": container with ID starting with 3440eaaf834aeacb17ad548325fd539c538014fec7f60a8302073deea546032d not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.209802 4733 scope.go:117] "RemoveContainer" containerID="4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.210192 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707\": container with ID starting with 4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707 not found: ID does not exist" containerID="4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.210224 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707"} err="failed to get container status \"4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707\": rpc error: code = NotFound desc = could not find container \"4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707\": container with ID starting with 4593baabafb2c422a75cf3be550f2764c05a055d6878c6f4f41bd77819b4d707 not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.210237 4733 scope.go:117] "RemoveContainer" containerID="c4c1dc62247b1828e9f21a233ae28ccd95944940fd05f0524a54ae7d0e8466c2" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.210511 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4c1dc62247b1828e9f21a233ae28ccd95944940fd05f0524a54ae7d0e8466c2\": container with ID starting with c4c1dc62247b1828e9f21a233ae28ccd95944940fd05f0524a54ae7d0e8466c2 not found: ID does not exist" containerID="c4c1dc62247b1828e9f21a233ae28ccd95944940fd05f0524a54ae7d0e8466c2" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.210532 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4c1dc62247b1828e9f21a233ae28ccd95944940fd05f0524a54ae7d0e8466c2"} err="failed to get container status \"c4c1dc62247b1828e9f21a233ae28ccd95944940fd05f0524a54ae7d0e8466c2\": rpc error: code = NotFound desc = could not find container \"c4c1dc62247b1828e9f21a233ae28ccd95944940fd05f0524a54ae7d0e8466c2\": container with ID starting with c4c1dc62247b1828e9f21a233ae28ccd95944940fd05f0524a54ae7d0e8466c2 not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.210544 4733 scope.go:117] "RemoveContainer" containerID="7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.223731 4733 scope.go:117] "RemoveContainer" containerID="38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.239468 4733 scope.go:117] "RemoveContainer" containerID="a0622b90f7cc58aacc2305072384d7926feb7214caa0c738f94161774a191b7c" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.255098 4733 scope.go:117] "RemoveContainer" containerID="7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f" Dec 13 07:09:39 crc 
kubenswrapper[4733]: E1213 07:09:39.255748 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f\": container with ID starting with 7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f not found: ID does not exist" containerID="7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.255796 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f"} err="failed to get container status \"7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f\": rpc error: code = NotFound desc = could not find container \"7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f\": container with ID starting with 7daaa8cd92ec7a7b00d4c14a570fb6f1209638765225930931351c6ac714492f not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.255849 4733 scope.go:117] "RemoveContainer" containerID="38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.257130 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c\": container with ID starting with 38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c not found: ID does not exist" containerID="38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.257155 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c"} err="failed to get container status \"38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c\": rpc error: code = NotFound desc = could not find container \"38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c\": container with ID starting with 38f6b474ef06815356df381ed7325bbad73ed9d1e9d489481bcf471c36ba687c not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.257169 4733 scope.go:117] "RemoveContainer" containerID="a0622b90f7cc58aacc2305072384d7926feb7214caa0c738f94161774a191b7c" Dec 13 07:09:39 crc kubenswrapper[4733]: E1213 07:09:39.257474 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0622b90f7cc58aacc2305072384d7926feb7214caa0c738f94161774a191b7c\": container with ID starting with a0622b90f7cc58aacc2305072384d7926feb7214caa0c738f94161774a191b7c not found: ID does not exist" containerID="a0622b90f7cc58aacc2305072384d7926feb7214caa0c738f94161774a191b7c" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.257496 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0622b90f7cc58aacc2305072384d7926feb7214caa0c738f94161774a191b7c"} err="failed to get container status \"a0622b90f7cc58aacc2305072384d7926feb7214caa0c738f94161774a191b7c\": rpc error: code = NotFound desc = could not find container \"a0622b90f7cc58aacc2305072384d7926feb7214caa0c738f94161774a191b7c\": container with ID starting with a0622b90f7cc58aacc2305072384d7926feb7214caa0c738f94161774a191b7c not found: ID does not exist" Dec 13 07:09:39 crc kubenswrapper[4733]: 
I1213 07:09:39.604033 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11a45191-a277-4060-8dfc-4c577c6d2702" path="/var/lib/kubelet/pods/11a45191-a277-4060-8dfc-4c577c6d2702/volumes" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.605495 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" path="/var/lib/kubelet/pods/13d8b178-e08a-43ec-a5ed-5a3c8816f7f7/volumes" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.606346 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" path="/var/lib/kubelet/pods/1f0dfb11-f02d-4007-8275-c92f802f9010/volumes" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.608085 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" path="/var/lib/kubelet/pods/3851778b-0b71-4b8f-b562-0d8f9bb86e75/volumes" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.608948 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" path="/var/lib/kubelet/pods/3bc7441a-bd67-4b1e-a799-684eb9e4463d/volumes" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.610187 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="404840cf-dd63-4b5f-bde2-49af2c7ace66" path="/var/lib/kubelet/pods/404840cf-dd63-4b5f-bde2-49af2c7ace66/volumes" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.610789 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" path="/var/lib/kubelet/pods/ce65638e-1ce3-4625-ad9b-2dd7af81efc4/volumes" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.611557 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" path="/var/lib/kubelet/pods/f095c8b8-7de4-40cc-baf2-513801c2cd89/volumes" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.695413 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-486jl" event={"ID":"c77f7ee6-129c-4a48-b822-e89558943b49","Type":"ContainerStarted","Data":"295cc94bcdbba14b65a4b4d51f6a541ef67bb5d145da24f094f83593d2953449"} Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.695459 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-486jl" event={"ID":"c77f7ee6-129c-4a48-b822-e89558943b49","Type":"ContainerStarted","Data":"27a097c7d7fadd14a7325ea32cb97283fc00d2976861c3c7ab0938b20efeb158"} Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.695748 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.697771 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-486jl" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.704449 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjj7h" event={"ID":"93072069-7e83-44f8-a145-e4d481eeb3de","Type":"ContainerDied","Data":"dcd79a0f431fd8be81af3b3e47c675d7589cd28aa8e46aa2da2b651cda2f1fac"} Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.704522 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bjj7h" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.704539 4733 scope.go:117] "RemoveContainer" containerID="75706bce4a34e4d55d681f11e6cb58673ef10c61f36209f15a261517120bde55" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.714540 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-486jl" podStartSLOduration=2.714521358 podStartE2EDuration="2.714521358s" podCreationTimestamp="2025-12-13 07:09:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:09:39.711722227 +0000 UTC m=+324.322952720" watchObservedRunningTime="2025-12-13 07:09:39.714521358 +0000 UTC m=+324.325751841" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.723443 4733 scope.go:117] "RemoveContainer" containerID="7f4a8e117e727376d131bf415389f69f1a0e61808b7394d62f68daf6a58a9d20" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.739690 4733 scope.go:117] "RemoveContainer" containerID="1b9442d98c86b3f3d03cdabbfe2bc2d426b5950babb38dd833f0df515ffdfe33" Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.744790 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bjj7h"] Dec 13 07:09:39 crc kubenswrapper[4733]: I1213 07:09:39.753557 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bjj7h"] Dec 13 07:09:41 crc kubenswrapper[4733]: I1213 07:09:41.600931 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" path="/var/lib/kubelet/pods/93072069-7e83-44f8-a145-e4d481eeb3de/volumes" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.057011 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-prmc7"] Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.057981 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.057995 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058004 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058010 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058021 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058027 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058035 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058042 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" containerName="registry-server" 
Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058052 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058058 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058066 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058071 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058077 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11a45191-a277-4060-8dfc-4c577c6d2702" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058083 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="11a45191-a277-4060-8dfc-4c577c6d2702" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058090 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058096 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058107 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058113 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058121 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058126 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058135 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerName="marketplace-operator" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058141 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerName="marketplace-operator" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058150 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11a45191-a277-4060-8dfc-4c577c6d2702" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058155 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="11a45191-a277-4060-8dfc-4c577c6d2702" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058163 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058169 4733 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058176 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058182 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058190 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11a45191-a277-4060-8dfc-4c577c6d2702" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058195 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="11a45191-a277-4060-8dfc-4c577c6d2702" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058203 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058255 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058265 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058272 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058280 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058286 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058295 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058301 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058310 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058315 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058323 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058329 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058336 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058341 4733 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058349 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058356 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058363 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058368 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerName="extract-content" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058374 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058380 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" containerName="extract-utilities" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058478 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerName="marketplace-operator" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058487 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="93072069-7e83-44f8-a145-e4d481eeb3de" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058497 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="13d8b178-e08a-43ec-a5ed-5a3c8816f7f7" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058506 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="3851778b-0b71-4b8f-b562-0d8f9bb86e75" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058515 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="11a45191-a277-4060-8dfc-4c577c6d2702" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058523 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="f095c8b8-7de4-40cc-baf2-513801c2cd89" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058529 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce65638e-1ce3-4625-ad9b-2dd7af81efc4" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058535 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerName="marketplace-operator" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058543 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f0dfb11-f02d-4007-8275-c92f802f9010" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058550 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bc7441a-bd67-4b1e-a799-684eb9e4463d" containerName="registry-server" Dec 13 07:09:48 crc kubenswrapper[4733]: E1213 07:09:48.058631 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerName="marketplace-operator" 
Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.058637 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="404840cf-dd63-4b5f-bde2-49af2c7ace66" containerName="marketplace-operator" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.059323 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.062475 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.070870 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-prmc7"] Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.203306 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bpzb\" (UniqueName: \"kubernetes.io/projected/68d77121-ca01-41f6-be15-9b79f6911f5f-kube-api-access-6bpzb\") pod \"community-operators-prmc7\" (UID: \"68d77121-ca01-41f6-be15-9b79f6911f5f\") " pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.203404 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68d77121-ca01-41f6-be15-9b79f6911f5f-utilities\") pod \"community-operators-prmc7\" (UID: \"68d77121-ca01-41f6-be15-9b79f6911f5f\") " pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.203442 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68d77121-ca01-41f6-be15-9b79f6911f5f-catalog-content\") pod \"community-operators-prmc7\" (UID: \"68d77121-ca01-41f6-be15-9b79f6911f5f\") " pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.254918 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k9tlx"] Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.255934 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.260582 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.270716 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9tlx"] Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.307624 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bpzb\" (UniqueName: \"kubernetes.io/projected/68d77121-ca01-41f6-be15-9b79f6911f5f-kube-api-access-6bpzb\") pod \"community-operators-prmc7\" (UID: \"68d77121-ca01-41f6-be15-9b79f6911f5f\") " pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.307690 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68d77121-ca01-41f6-be15-9b79f6911f5f-utilities\") pod \"community-operators-prmc7\" (UID: \"68d77121-ca01-41f6-be15-9b79f6911f5f\") " pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.307726 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68d77121-ca01-41f6-be15-9b79f6911f5f-catalog-content\") pod \"community-operators-prmc7\" (UID: \"68d77121-ca01-41f6-be15-9b79f6911f5f\") " pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.308394 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68d77121-ca01-41f6-be15-9b79f6911f5f-catalog-content\") pod \"community-operators-prmc7\" (UID: \"68d77121-ca01-41f6-be15-9b79f6911f5f\") " pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.308422 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68d77121-ca01-41f6-be15-9b79f6911f5f-utilities\") pod \"community-operators-prmc7\" (UID: \"68d77121-ca01-41f6-be15-9b79f6911f5f\") " pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.343995 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bpzb\" (UniqueName: \"kubernetes.io/projected/68d77121-ca01-41f6-be15-9b79f6911f5f-kube-api-access-6bpzb\") pod \"community-operators-prmc7\" (UID: \"68d77121-ca01-41f6-be15-9b79f6911f5f\") " pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.380680 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.409025 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw58h\" (UniqueName: \"kubernetes.io/projected/49cc60d9-6142-4372-98a3-981c963a1f8d-kube-api-access-gw58h\") pod \"redhat-marketplace-k9tlx\" (UID: \"49cc60d9-6142-4372-98a3-981c963a1f8d\") " pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.409601 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49cc60d9-6142-4372-98a3-981c963a1f8d-utilities\") pod \"redhat-marketplace-k9tlx\" (UID: \"49cc60d9-6142-4372-98a3-981c963a1f8d\") " pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.409649 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49cc60d9-6142-4372-98a3-981c963a1f8d-catalog-content\") pod \"redhat-marketplace-k9tlx\" (UID: \"49cc60d9-6142-4372-98a3-981c963a1f8d\") " pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.511461 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw58h\" (UniqueName: \"kubernetes.io/projected/49cc60d9-6142-4372-98a3-981c963a1f8d-kube-api-access-gw58h\") pod \"redhat-marketplace-k9tlx\" (UID: \"49cc60d9-6142-4372-98a3-981c963a1f8d\") " pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.511569 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49cc60d9-6142-4372-98a3-981c963a1f8d-utilities\") pod \"redhat-marketplace-k9tlx\" (UID: \"49cc60d9-6142-4372-98a3-981c963a1f8d\") " pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.511601 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49cc60d9-6142-4372-98a3-981c963a1f8d-catalog-content\") pod \"redhat-marketplace-k9tlx\" (UID: \"49cc60d9-6142-4372-98a3-981c963a1f8d\") " pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.513079 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/49cc60d9-6142-4372-98a3-981c963a1f8d-utilities\") pod \"redhat-marketplace-k9tlx\" (UID: \"49cc60d9-6142-4372-98a3-981c963a1f8d\") " pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.513104 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/49cc60d9-6142-4372-98a3-981c963a1f8d-catalog-content\") pod \"redhat-marketplace-k9tlx\" (UID: \"49cc60d9-6142-4372-98a3-981c963a1f8d\") " pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.540588 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw58h\" (UniqueName: \"kubernetes.io/projected/49cc60d9-6142-4372-98a3-981c963a1f8d-kube-api-access-gw58h\") pod 
\"redhat-marketplace-k9tlx\" (UID: \"49cc60d9-6142-4372-98a3-981c963a1f8d\") " pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.610777 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:48 crc kubenswrapper[4733]: I1213 07:09:48.814204 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-prmc7"] Dec 13 07:09:48 crc kubenswrapper[4733]: W1213 07:09:48.824724 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68d77121_ca01_41f6_be15_9b79f6911f5f.slice/crio-6ba70100da83e879fc009c96023f87d21763c81f6df81c352c20df7290260588 WatchSource:0}: Error finding container 6ba70100da83e879fc009c96023f87d21763c81f6df81c352c20df7290260588: Status 404 returned error can't find the container with id 6ba70100da83e879fc009c96023f87d21763c81f6df81c352c20df7290260588 Dec 13 07:09:49 crc kubenswrapper[4733]: I1213 07:09:49.025349 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9tlx"] Dec 13 07:09:49 crc kubenswrapper[4733]: E1213 07:09:49.044677 4733 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68d77121_ca01_41f6_be15_9b79f6911f5f.slice/crio-conmon-4b4193205a3852d625d5508871b44f16f6642e94379d24586ac494fd568d44c1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68d77121_ca01_41f6_be15_9b79f6911f5f.slice/crio-4b4193205a3852d625d5508871b44f16f6642e94379d24586ac494fd568d44c1.scope\": RecentStats: unable to find data in memory cache]" Dec 13 07:09:49 crc kubenswrapper[4733]: I1213 07:09:49.764004 4733 generic.go:334] "Generic (PLEG): container finished" podID="49cc60d9-6142-4372-98a3-981c963a1f8d" containerID="2d813b318e5256c47bf73f5fec9c0fb0180565e2feaab0abd908e2fa6557685a" exitCode=0 Dec 13 07:09:49 crc kubenswrapper[4733]: I1213 07:09:49.764119 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9tlx" event={"ID":"49cc60d9-6142-4372-98a3-981c963a1f8d","Type":"ContainerDied","Data":"2d813b318e5256c47bf73f5fec9c0fb0180565e2feaab0abd908e2fa6557685a"} Dec 13 07:09:49 crc kubenswrapper[4733]: I1213 07:09:49.764375 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9tlx" event={"ID":"49cc60d9-6142-4372-98a3-981c963a1f8d","Type":"ContainerStarted","Data":"b7fbbde96d24d0414f3aff93818f8f5478e924b35ff9ac8f714d3e6a648eb5bf"} Dec 13 07:09:49 crc kubenswrapper[4733]: I1213 07:09:49.772797 4733 generic.go:334] "Generic (PLEG): container finished" podID="68d77121-ca01-41f6-be15-9b79f6911f5f" containerID="4b4193205a3852d625d5508871b44f16f6642e94379d24586ac494fd568d44c1" exitCode=0 Dec 13 07:09:49 crc kubenswrapper[4733]: I1213 07:09:49.772869 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-prmc7" event={"ID":"68d77121-ca01-41f6-be15-9b79f6911f5f","Type":"ContainerDied","Data":"4b4193205a3852d625d5508871b44f16f6642e94379d24586ac494fd568d44c1"} Dec 13 07:09:49 crc kubenswrapper[4733]: I1213 07:09:49.772912 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-prmc7" 
event={"ID":"68d77121-ca01-41f6-be15-9b79f6911f5f","Type":"ContainerStarted","Data":"6ba70100da83e879fc009c96023f87d21763c81f6df81c352c20df7290260588"} Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.168352 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-lmxww"] Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.168859 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" podUID="6f2d43d6-ad5e-4056-b89f-d07d5993c386" containerName="controller-manager" containerID="cri-o://e0c14e4fa52c9d16b3fdb96089fb79e6d2cf1db5fadfb7e0f6d291684ff0cbf3" gracePeriod=30 Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.208697 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9"] Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.209313 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" podUID="988e28ab-766f-4c46-bb1a-e0064f00df78" containerName="route-controller-manager" containerID="cri-o://b264183c77454fc8b3d7bdabab64a9e628ef4ac7bb960a115eade1d946117e23" gracePeriod=30 Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.458151 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sm9dq"] Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.459287 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.461890 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.468081 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sm9dq"] Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.537656 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80c07864-14f3-4d8e-bb01-50460c24dbfb-utilities\") pod \"redhat-operators-sm9dq\" (UID: \"80c07864-14f3-4d8e-bb01-50460c24dbfb\") " pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.537807 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6zsj\" (UniqueName: \"kubernetes.io/projected/80c07864-14f3-4d8e-bb01-50460c24dbfb-kube-api-access-z6zsj\") pod \"redhat-operators-sm9dq\" (UID: \"80c07864-14f3-4d8e-bb01-50460c24dbfb\") " pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.537842 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80c07864-14f3-4d8e-bb01-50460c24dbfb-catalog-content\") pod \"redhat-operators-sm9dq\" (UID: \"80c07864-14f3-4d8e-bb01-50460c24dbfb\") " pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.639016 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6zsj\" (UniqueName: 
\"kubernetes.io/projected/80c07864-14f3-4d8e-bb01-50460c24dbfb-kube-api-access-z6zsj\") pod \"redhat-operators-sm9dq\" (UID: \"80c07864-14f3-4d8e-bb01-50460c24dbfb\") " pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.639111 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80c07864-14f3-4d8e-bb01-50460c24dbfb-catalog-content\") pod \"redhat-operators-sm9dq\" (UID: \"80c07864-14f3-4d8e-bb01-50460c24dbfb\") " pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.639185 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80c07864-14f3-4d8e-bb01-50460c24dbfb-utilities\") pod \"redhat-operators-sm9dq\" (UID: \"80c07864-14f3-4d8e-bb01-50460c24dbfb\") " pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.639751 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80c07864-14f3-4d8e-bb01-50460c24dbfb-catalog-content\") pod \"redhat-operators-sm9dq\" (UID: \"80c07864-14f3-4d8e-bb01-50460c24dbfb\") " pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.639911 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80c07864-14f3-4d8e-bb01-50460c24dbfb-utilities\") pod \"redhat-operators-sm9dq\" (UID: \"80c07864-14f3-4d8e-bb01-50460c24dbfb\") " pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.674308 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fns8q"] Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.675677 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.678775 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.684857 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6zsj\" (UniqueName: \"kubernetes.io/projected/80c07864-14f3-4d8e-bb01-50460c24dbfb-kube-api-access-z6zsj\") pod \"redhat-operators-sm9dq\" (UID: \"80c07864-14f3-4d8e-bb01-50460c24dbfb\") " pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.706673 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fns8q"] Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.740570 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2fqh\" (UniqueName: \"kubernetes.io/projected/afbea49c-a387-4796-a81c-fa944d031fa7-kube-api-access-s2fqh\") pod \"certified-operators-fns8q\" (UID: \"afbea49c-a387-4796-a81c-fa944d031fa7\") " pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.740634 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afbea49c-a387-4796-a81c-fa944d031fa7-catalog-content\") pod \"certified-operators-fns8q\" (UID: \"afbea49c-a387-4796-a81c-fa944d031fa7\") " pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.740669 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afbea49c-a387-4796-a81c-fa944d031fa7-utilities\") pod \"certified-operators-fns8q\" (UID: \"afbea49c-a387-4796-a81c-fa944d031fa7\") " pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.783589 4733 generic.go:334] "Generic (PLEG): container finished" podID="988e28ab-766f-4c46-bb1a-e0064f00df78" containerID="b264183c77454fc8b3d7bdabab64a9e628ef4ac7bb960a115eade1d946117e23" exitCode=0 Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.784037 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" event={"ID":"988e28ab-766f-4c46-bb1a-e0064f00df78","Type":"ContainerDied","Data":"b264183c77454fc8b3d7bdabab64a9e628ef4ac7bb960a115eade1d946117e23"} Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.793229 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-prmc7" event={"ID":"68d77121-ca01-41f6-be15-9b79f6911f5f","Type":"ContainerStarted","Data":"583b5620c472f0b31fe5e51f19fd8256c0923e474c39bcd4d3fa36a9c1cb5831"} Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.800023 4733 generic.go:334] "Generic (PLEG): container finished" podID="6f2d43d6-ad5e-4056-b89f-d07d5993c386" containerID="e0c14e4fa52c9d16b3fdb96089fb79e6d2cf1db5fadfb7e0f6d291684ff0cbf3" exitCode=0 Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.800092 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" 
event={"ID":"6f2d43d6-ad5e-4056-b89f-d07d5993c386","Type":"ContainerDied","Data":"e0c14e4fa52c9d16b3fdb96089fb79e6d2cf1db5fadfb7e0f6d291684ff0cbf3"} Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.828988 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.842645 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afbea49c-a387-4796-a81c-fa944d031fa7-catalog-content\") pod \"certified-operators-fns8q\" (UID: \"afbea49c-a387-4796-a81c-fa944d031fa7\") " pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.842716 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afbea49c-a387-4796-a81c-fa944d031fa7-utilities\") pod \"certified-operators-fns8q\" (UID: \"afbea49c-a387-4796-a81c-fa944d031fa7\") " pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.842817 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2fqh\" (UniqueName: \"kubernetes.io/projected/afbea49c-a387-4796-a81c-fa944d031fa7-kube-api-access-s2fqh\") pod \"certified-operators-fns8q\" (UID: \"afbea49c-a387-4796-a81c-fa944d031fa7\") " pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.843346 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afbea49c-a387-4796-a81c-fa944d031fa7-catalog-content\") pod \"certified-operators-fns8q\" (UID: \"afbea49c-a387-4796-a81c-fa944d031fa7\") " pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.843753 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afbea49c-a387-4796-a81c-fa944d031fa7-utilities\") pod \"certified-operators-fns8q\" (UID: \"afbea49c-a387-4796-a81c-fa944d031fa7\") " pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:09:50 crc kubenswrapper[4733]: I1213 07:09:50.866859 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2fqh\" (UniqueName: \"kubernetes.io/projected/afbea49c-a387-4796-a81c-fa944d031fa7-kube-api-access-s2fqh\") pod \"certified-operators-fns8q\" (UID: \"afbea49c-a387-4796-a81c-fa944d031fa7\") " pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.039623 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.287753 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.345182 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n"] Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.345330 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:51 crc kubenswrapper[4733]: E1213 07:09:51.345642 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f2d43d6-ad5e-4056-b89f-d07d5993c386" containerName="controller-manager" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.345661 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f2d43d6-ad5e-4056-b89f-d07d5993c386" containerName="controller-manager" Dec 13 07:09:51 crc kubenswrapper[4733]: E1213 07:09:51.345673 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="988e28ab-766f-4c46-bb1a-e0064f00df78" containerName="route-controller-manager" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.345680 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="988e28ab-766f-4c46-bb1a-e0064f00df78" containerName="route-controller-manager" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.345793 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="988e28ab-766f-4c46-bb1a-e0064f00df78" containerName="route-controller-manager" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.345806 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f2d43d6-ad5e-4056-b89f-d07d5993c386" containerName="controller-manager" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.346358 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.350443 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n"] Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.351730 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-config\") pod \"988e28ab-766f-4c46-bb1a-e0064f00df78\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.351787 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxmjh\" (UniqueName: \"kubernetes.io/projected/988e28ab-766f-4c46-bb1a-e0064f00df78-kube-api-access-lxmjh\") pod \"988e28ab-766f-4c46-bb1a-e0064f00df78\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.351837 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-client-ca\") pod \"988e28ab-766f-4c46-bb1a-e0064f00df78\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.351912 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/988e28ab-766f-4c46-bb1a-e0064f00df78-serving-cert\") pod \"988e28ab-766f-4c46-bb1a-e0064f00df78\" (UID: \"988e28ab-766f-4c46-bb1a-e0064f00df78\") " Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.353612 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-client-ca" (OuterVolumeSpecName: "client-ca") pod "988e28ab-766f-4c46-bb1a-e0064f00df78" (UID: "988e28ab-766f-4c46-bb1a-e0064f00df78"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.353659 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-config" (OuterVolumeSpecName: "config") pod "988e28ab-766f-4c46-bb1a-e0064f00df78" (UID: "988e28ab-766f-4c46-bb1a-e0064f00df78"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.358837 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/988e28ab-766f-4c46-bb1a-e0064f00df78-kube-api-access-lxmjh" (OuterVolumeSpecName: "kube-api-access-lxmjh") pod "988e28ab-766f-4c46-bb1a-e0064f00df78" (UID: "988e28ab-766f-4c46-bb1a-e0064f00df78"). InnerVolumeSpecName "kube-api-access-lxmjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.359055 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/988e28ab-766f-4c46-bb1a-e0064f00df78-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "988e28ab-766f-4c46-bb1a-e0064f00df78" (UID: "988e28ab-766f-4c46-bb1a-e0064f00df78"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.424269 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sm9dq"] Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.453511 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f2d43d6-ad5e-4056-b89f-d07d5993c386-serving-cert\") pod \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.453596 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-client-ca\") pod \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.453668 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-proxy-ca-bundles\") pod \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.453737 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftvtk\" (UniqueName: \"kubernetes.io/projected/6f2d43d6-ad5e-4056-b89f-d07d5993c386-kube-api-access-ftvtk\") pod \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.453770 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-config\") pod \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\" (UID: \"6f2d43d6-ad5e-4056-b89f-d07d5993c386\") " Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.453930 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9qtq\" (UniqueName: 
\"kubernetes.io/projected/e98cc590-12c0-4e23-bca3-0b18e58a2549-kube-api-access-s9qtq\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.453965 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e98cc590-12c0-4e23-bca3-0b18e58a2549-config\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.453995 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e98cc590-12c0-4e23-bca3-0b18e58a2549-serving-cert\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.454024 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e98cc590-12c0-4e23-bca3-0b18e58a2549-client-ca\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.454079 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.454089 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxmjh\" (UniqueName: \"kubernetes.io/projected/988e28ab-766f-4c46-bb1a-e0064f00df78-kube-api-access-lxmjh\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.454098 4733 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/988e28ab-766f-4c46-bb1a-e0064f00df78-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.454107 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/988e28ab-766f-4c46-bb1a-e0064f00df78-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.455127 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "6f2d43d6-ad5e-4056-b89f-d07d5993c386" (UID: "6f2d43d6-ad5e-4056-b89f-d07d5993c386"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.455160 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-config" (OuterVolumeSpecName: "config") pod "6f2d43d6-ad5e-4056-b89f-d07d5993c386" (UID: "6f2d43d6-ad5e-4056-b89f-d07d5993c386"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.455518 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-client-ca" (OuterVolumeSpecName: "client-ca") pod "6f2d43d6-ad5e-4056-b89f-d07d5993c386" (UID: "6f2d43d6-ad5e-4056-b89f-d07d5993c386"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.457470 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f2d43d6-ad5e-4056-b89f-d07d5993c386-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6f2d43d6-ad5e-4056-b89f-d07d5993c386" (UID: "6f2d43d6-ad5e-4056-b89f-d07d5993c386"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.458420 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f2d43d6-ad5e-4056-b89f-d07d5993c386-kube-api-access-ftvtk" (OuterVolumeSpecName: "kube-api-access-ftvtk") pod "6f2d43d6-ad5e-4056-b89f-d07d5993c386" (UID: "6f2d43d6-ad5e-4056-b89f-d07d5993c386"). InnerVolumeSpecName "kube-api-access-ftvtk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.550878 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fns8q"] Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.554738 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9qtq\" (UniqueName: \"kubernetes.io/projected/e98cc590-12c0-4e23-bca3-0b18e58a2549-kube-api-access-s9qtq\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.554799 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e98cc590-12c0-4e23-bca3-0b18e58a2549-config\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.554843 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e98cc590-12c0-4e23-bca3-0b18e58a2549-serving-cert\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.554881 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e98cc590-12c0-4e23-bca3-0b18e58a2549-client-ca\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.554928 4733 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f2d43d6-ad5e-4056-b89f-d07d5993c386-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 
13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.554940 4733 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-client-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.554951 4733 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.554962 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftvtk\" (UniqueName: \"kubernetes.io/projected/6f2d43d6-ad5e-4056-b89f-d07d5993c386-kube-api-access-ftvtk\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.554972 4733 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f2d43d6-ad5e-4056-b89f-d07d5993c386-config\") on node \"crc\" DevicePath \"\"" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.556418 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e98cc590-12c0-4e23-bca3-0b18e58a2549-config\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.557243 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e98cc590-12c0-4e23-bca3-0b18e58a2549-client-ca\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.562781 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e98cc590-12c0-4e23-bca3-0b18e58a2549-serving-cert\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.572953 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9qtq\" (UniqueName: \"kubernetes.io/projected/e98cc590-12c0-4e23-bca3-0b18e58a2549-kube-api-access-s9qtq\") pod \"route-controller-manager-78df779bd8-lsh2n\" (UID: \"e98cc590-12c0-4e23-bca3-0b18e58a2549\") " pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.669084 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.809716 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" event={"ID":"6f2d43d6-ad5e-4056-b89f-d07d5993c386","Type":"ContainerDied","Data":"b7fc2342c3bf029c375a9c970a77f7790f7218e64b19bd52e3fdef3d49d04f7e"} Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.809809 4733 scope.go:117] "RemoveContainer" containerID="e0c14e4fa52c9d16b3fdb96089fb79e6d2cf1db5fadfb7e0f6d291684ff0cbf3" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.809976 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5947bdc4b4-lmxww" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.814265 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fns8q" event={"ID":"afbea49c-a387-4796-a81c-fa944d031fa7","Type":"ContainerStarted","Data":"7a7eaf0938b85ebcac335436ea24bb726169a4cee3bedc0a4cbb239a5eee8ed1"} Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.817531 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" event={"ID":"988e28ab-766f-4c46-bb1a-e0064f00df78","Type":"ContainerDied","Data":"ab88fac87b824e4ee861f1cdeab4aa5f0a4a09bf680a2f88d4b381ceca94ef38"} Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.817586 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.829372 4733 generic.go:334] "Generic (PLEG): container finished" podID="80c07864-14f3-4d8e-bb01-50460c24dbfb" containerID="b2f8ee7c016d9d70c64f95b32a36909e08c58397ba8caa4de5b2907e05afc589" exitCode=0 Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.829987 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm9dq" event={"ID":"80c07864-14f3-4d8e-bb01-50460c24dbfb","Type":"ContainerDied","Data":"b2f8ee7c016d9d70c64f95b32a36909e08c58397ba8caa4de5b2907e05afc589"} Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.830728 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm9dq" event={"ID":"80c07864-14f3-4d8e-bb01-50460c24dbfb","Type":"ContainerStarted","Data":"2500cc4f8a81a7a66dcbe608f999d3deee2f77a80209c9fd51c12a935b44970e"} Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.832392 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-lmxww"] Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.839138 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-lmxww"] Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.847491 4733 generic.go:334] "Generic (PLEG): container finished" podID="49cc60d9-6142-4372-98a3-981c963a1f8d" containerID="3ce02d3ba5504171f52bc73e714cb189c0bb799c9bbb34617d97c9a92a5af845" exitCode=0 Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.847718 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9tlx" 
event={"ID":"49cc60d9-6142-4372-98a3-981c963a1f8d","Type":"ContainerDied","Data":"3ce02d3ba5504171f52bc73e714cb189c0bb799c9bbb34617d97c9a92a5af845"} Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.856603 4733 scope.go:117] "RemoveContainer" containerID="b264183c77454fc8b3d7bdabab64a9e628ef4ac7bb960a115eade1d946117e23" Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.859303 4733 generic.go:334] "Generic (PLEG): container finished" podID="68d77121-ca01-41f6-be15-9b79f6911f5f" containerID="583b5620c472f0b31fe5e51f19fd8256c0923e474c39bcd4d3fa36a9c1cb5831" exitCode=0 Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.859345 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-prmc7" event={"ID":"68d77121-ca01-41f6-be15-9b79f6911f5f","Type":"ContainerDied","Data":"583b5620c472f0b31fe5e51f19fd8256c0923e474c39bcd4d3fa36a9c1cb5831"} Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.860609 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9"] Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.867888 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-2dfz9"] Dec 13 07:09:51 crc kubenswrapper[4733]: I1213 07:09:51.913507 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n"] Dec 13 07:09:51 crc kubenswrapper[4733]: W1213 07:09:51.918482 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode98cc590_12c0_4e23_bca3_0b18e58a2549.slice/crio-9da53bdd22747cdf00bc537c5b7702e6d754cf29d5205796299a80ae63e6502d WatchSource:0}: Error finding container 9da53bdd22747cdf00bc537c5b7702e6d754cf29d5205796299a80ae63e6502d: Status 404 returned error can't find the container with id 9da53bdd22747cdf00bc537c5b7702e6d754cf29d5205796299a80ae63e6502d Dec 13 07:09:52 crc kubenswrapper[4733]: I1213 07:09:52.867534 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9tlx" event={"ID":"49cc60d9-6142-4372-98a3-981c963a1f8d","Type":"ContainerStarted","Data":"e15ec7a3ac95d9e468f15f6c924bb776a4896dab20095f858aea316820a532f5"} Dec 13 07:09:52 crc kubenswrapper[4733]: I1213 07:09:52.870748 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-prmc7" event={"ID":"68d77121-ca01-41f6-be15-9b79f6911f5f","Type":"ContainerStarted","Data":"2542f8f6b6e7e3305eb4f897d0840e09edaae2b6eebf2d9c058b179a0bb2325d"} Dec 13 07:09:52 crc kubenswrapper[4733]: I1213 07:09:52.874455 4733 generic.go:334] "Generic (PLEG): container finished" podID="afbea49c-a387-4796-a81c-fa944d031fa7" containerID="07fad01cbb3715ba5ea1d7f178aa31ac03ab3ceeded0d4162e4c68c9e43322e8" exitCode=0 Dec 13 07:09:52 crc kubenswrapper[4733]: I1213 07:09:52.874555 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fns8q" event={"ID":"afbea49c-a387-4796-a81c-fa944d031fa7","Type":"ContainerDied","Data":"07fad01cbb3715ba5ea1d7f178aa31ac03ab3ceeded0d4162e4c68c9e43322e8"} Dec 13 07:09:52 crc kubenswrapper[4733]: I1213 07:09:52.880599 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" 
event={"ID":"e98cc590-12c0-4e23-bca3-0b18e58a2549","Type":"ContainerStarted","Data":"4fcce47d00922b9534e4ed4dffa67fdddf191340f420648ddae8f70f9cec0156"} Dec 13 07:09:52 crc kubenswrapper[4733]: I1213 07:09:52.880802 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" event={"ID":"e98cc590-12c0-4e23-bca3-0b18e58a2549","Type":"ContainerStarted","Data":"9da53bdd22747cdf00bc537c5b7702e6d754cf29d5205796299a80ae63e6502d"} Dec 13 07:09:52 crc kubenswrapper[4733]: I1213 07:09:52.882636 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:52 crc kubenswrapper[4733]: I1213 07:09:52.889409 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" Dec 13 07:09:52 crc kubenswrapper[4733]: I1213 07:09:52.899010 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k9tlx" podStartSLOduration=2.091557654 podStartE2EDuration="4.898986816s" podCreationTimestamp="2025-12-13 07:09:48 +0000 UTC" firstStartedPulling="2025-12-13 07:09:49.769380378 +0000 UTC m=+334.380610861" lastFinishedPulling="2025-12-13 07:09:52.57680954 +0000 UTC m=+337.188040023" observedRunningTime="2025-12-13 07:09:52.894575599 +0000 UTC m=+337.505806102" watchObservedRunningTime="2025-12-13 07:09:52.898986816 +0000 UTC m=+337.510217309" Dec 13 07:09:52 crc kubenswrapper[4733]: I1213 07:09:52.916238 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-78df779bd8-lsh2n" podStartSLOduration=2.9162035509999997 podStartE2EDuration="2.916203551s" podCreationTimestamp="2025-12-13 07:09:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:09:52.91374011 +0000 UTC m=+337.524970613" watchObservedRunningTime="2025-12-13 07:09:52.916203551 +0000 UTC m=+337.527434044" Dec 13 07:09:52 crc kubenswrapper[4733]: I1213 07:09:52.968331 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-prmc7" podStartSLOduration=2.410707333 podStartE2EDuration="4.968307641s" podCreationTimestamp="2025-12-13 07:09:48 +0000 UTC" firstStartedPulling="2025-12-13 07:09:49.775127214 +0000 UTC m=+334.386357697" lastFinishedPulling="2025-12-13 07:09:52.332727522 +0000 UTC m=+336.943958005" observedRunningTime="2025-12-13 07:09:52.96133442 +0000 UTC m=+337.572564903" watchObservedRunningTime="2025-12-13 07:09:52.968307641 +0000 UTC m=+337.579538124" Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.602667 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f2d43d6-ad5e-4056-b89f-d07d5993c386" path="/var/lib/kubelet/pods/6f2d43d6-ad5e-4056-b89f-d07d5993c386/volumes" Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.604173 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="988e28ab-766f-4c46-bb1a-e0064f00df78" path="/var/lib/kubelet/pods/988e28ab-766f-4c46-bb1a-e0064f00df78/volumes" Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.891648 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm9dq" 
event={"ID":"80c07864-14f3-4d8e-bb01-50460c24dbfb","Type":"ContainerStarted","Data":"b15ed6fd181ea3b2bb401eaeb98cfe605158317984df0be53452d4429459d5da"} Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.957419 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-9cddcdb95-gnbds"] Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.958201 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.961950 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.961973 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.962067 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.962095 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.962329 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.962393 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.966923 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 13 07:09:53 crc kubenswrapper[4733]: I1213 07:09:53.971488 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-9cddcdb95-gnbds"] Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.088385 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7bb68e50-9ed4-410c-9e2c-099e457012be-client-ca\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.088433 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4845t\" (UniqueName: \"kubernetes.io/projected/7bb68e50-9ed4-410c-9e2c-099e457012be-kube-api-access-4845t\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.088456 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7bb68e50-9ed4-410c-9e2c-099e457012be-proxy-ca-bundles\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.088494 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7bb68e50-9ed4-410c-9e2c-099e457012be-config\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.088527 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bb68e50-9ed4-410c-9e2c-099e457012be-serving-cert\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.189815 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7bb68e50-9ed4-410c-9e2c-099e457012be-client-ca\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.189892 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4845t\" (UniqueName: \"kubernetes.io/projected/7bb68e50-9ed4-410c-9e2c-099e457012be-kube-api-access-4845t\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.189915 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7bb68e50-9ed4-410c-9e2c-099e457012be-proxy-ca-bundles\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.189940 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bb68e50-9ed4-410c-9e2c-099e457012be-config\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.189992 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bb68e50-9ed4-410c-9e2c-099e457012be-serving-cert\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.190933 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7bb68e50-9ed4-410c-9e2c-099e457012be-client-ca\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.191401 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7bb68e50-9ed4-410c-9e2c-099e457012be-proxy-ca-bundles\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " 
pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.191606 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bb68e50-9ed4-410c-9e2c-099e457012be-config\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.197478 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bb68e50-9ed4-410c-9e2c-099e457012be-serving-cert\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.209078 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4845t\" (UniqueName: \"kubernetes.io/projected/7bb68e50-9ed4-410c-9e2c-099e457012be-kube-api-access-4845t\") pod \"controller-manager-9cddcdb95-gnbds\" (UID: \"7bb68e50-9ed4-410c-9e2c-099e457012be\") " pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.305143 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.725370 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-9cddcdb95-gnbds"] Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.899416 4733 generic.go:334] "Generic (PLEG): container finished" podID="80c07864-14f3-4d8e-bb01-50460c24dbfb" containerID="b15ed6fd181ea3b2bb401eaeb98cfe605158317984df0be53452d4429459d5da" exitCode=0 Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.899491 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm9dq" event={"ID":"80c07864-14f3-4d8e-bb01-50460c24dbfb","Type":"ContainerDied","Data":"b15ed6fd181ea3b2bb401eaeb98cfe605158317984df0be53452d4429459d5da"} Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.901714 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" event={"ID":"7bb68e50-9ed4-410c-9e2c-099e457012be","Type":"ContainerStarted","Data":"66b3bdaf208a08ddffa580426513ebb7d55bd972196e093c64544874277c0ca6"} Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.901778 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" event={"ID":"7bb68e50-9ed4-410c-9e2c-099e457012be","Type":"ContainerStarted","Data":"6da01642feb3dcd4cfa03260190a2a571a3fb717a31893ec1b2ab380047fdaa3"} Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.902744 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.904303 4733 patch_prober.go:28] interesting pod/controller-manager-9cddcdb95-gnbds container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.66:8443/healthz\": dial tcp 10.217.0.66:8443: connect: connection refused" start-of-body= Dec 13 07:09:54 crc kubenswrapper[4733]: 
I1213 07:09:54.904341 4733 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" podUID="7bb68e50-9ed4-410c-9e2c-099e457012be" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.66:8443/healthz\": dial tcp 10.217.0.66:8443: connect: connection refused" Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.906561 4733 generic.go:334] "Generic (PLEG): container finished" podID="afbea49c-a387-4796-a81c-fa944d031fa7" containerID="90ff3342f70e846f396d806fd763a8224093a8c557411b72ef1256034530ea96" exitCode=0 Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.907343 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fns8q" event={"ID":"afbea49c-a387-4796-a81c-fa944d031fa7","Type":"ContainerDied","Data":"90ff3342f70e846f396d806fd763a8224093a8c557411b72ef1256034530ea96"} Dec 13 07:09:54 crc kubenswrapper[4733]: I1213 07:09:54.965355 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" podStartSLOduration=4.9653325299999995 podStartE2EDuration="4.96533253s" podCreationTimestamp="2025-12-13 07:09:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:09:54.961678115 +0000 UTC m=+339.572908598" watchObservedRunningTime="2025-12-13 07:09:54.96533253 +0000 UTC m=+339.576563013" Dec 13 07:09:55 crc kubenswrapper[4733]: I1213 07:09:55.915664 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-9cddcdb95-gnbds" Dec 13 07:09:58 crc kubenswrapper[4733]: I1213 07:09:58.381706 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:58 crc kubenswrapper[4733]: I1213 07:09:58.382500 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:58 crc kubenswrapper[4733]: I1213 07:09:58.425424 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:09:58 crc kubenswrapper[4733]: I1213 07:09:58.612754 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:58 crc kubenswrapper[4733]: I1213 07:09:58.613177 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:58 crc kubenswrapper[4733]: I1213 07:09:58.659325 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:58 crc kubenswrapper[4733]: I1213 07:09:58.967126 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k9tlx" Dec 13 07:09:58 crc kubenswrapper[4733]: I1213 07:09:58.968397 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-prmc7" Dec 13 07:10:05 crc kubenswrapper[4733]: I1213 07:10:05.971324 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm9dq" 
event={"ID":"80c07864-14f3-4d8e-bb01-50460c24dbfb","Type":"ContainerStarted","Data":"257159ad47149d65e240c7bb9cb194dc5485e575b19d1035bcdfba28a2bb9b20"} Dec 13 07:10:05 crc kubenswrapper[4733]: I1213 07:10:05.975435 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fns8q" event={"ID":"afbea49c-a387-4796-a81c-fa944d031fa7","Type":"ContainerStarted","Data":"9e74c8d22cb1b06976b317c5fcf4af19e953fbc8940f6ecb0eb9a218e72c0d1b"} Dec 13 07:10:05 crc kubenswrapper[4733]: I1213 07:10:05.994886 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sm9dq" podStartSLOduration=2.595045455 podStartE2EDuration="15.994863313s" podCreationTimestamp="2025-12-13 07:09:50 +0000 UTC" firstStartedPulling="2025-12-13 07:09:51.856256633 +0000 UTC m=+336.467487116" lastFinishedPulling="2025-12-13 07:10:05.256074491 +0000 UTC m=+349.867304974" observedRunningTime="2025-12-13 07:10:05.990436645 +0000 UTC m=+350.601667158" watchObservedRunningTime="2025-12-13 07:10:05.994863313 +0000 UTC m=+350.606093796" Dec 13 07:10:06 crc kubenswrapper[4733]: I1213 07:10:06.017889 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fns8q" podStartSLOduration=3.663416565 podStartE2EDuration="16.017853964s" podCreationTimestamp="2025-12-13 07:09:50 +0000 UTC" firstStartedPulling="2025-12-13 07:09:52.907220363 +0000 UTC m=+337.518450846" lastFinishedPulling="2025-12-13 07:10:05.261657762 +0000 UTC m=+349.872888245" observedRunningTime="2025-12-13 07:10:06.014967781 +0000 UTC m=+350.626198264" watchObservedRunningTime="2025-12-13 07:10:06.017853964 +0000 UTC m=+350.629084447" Dec 13 07:10:10 crc kubenswrapper[4733]: I1213 07:10:10.834906 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:10:10 crc kubenswrapper[4733]: I1213 07:10:10.835385 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:10:10 crc kubenswrapper[4733]: I1213 07:10:10.873901 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:10:11 crc kubenswrapper[4733]: I1213 07:10:11.040034 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:10:11 crc kubenswrapper[4733]: I1213 07:10:11.040078 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:10:11 crc kubenswrapper[4733]: I1213 07:10:11.041220 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sm9dq" Dec 13 07:10:11 crc kubenswrapper[4733]: I1213 07:10:11.082622 4733 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:10:12 crc kubenswrapper[4733]: I1213 07:10:12.056788 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fns8q" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.180844 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lklth"] Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.182151 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.195032 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lklth"] Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.233888 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.233958 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsbbj\" (UniqueName: \"kubernetes.io/projected/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-kube-api-access-hsbbj\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.233986 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.234075 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-registry-certificates\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.234119 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-trusted-ca\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.234148 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.234174 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-bound-sa-token\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.234286 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-registry-tls\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.263296 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.334718 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-registry-tls\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.334789 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.334840 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsbbj\" (UniqueName: \"kubernetes.io/projected/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-kube-api-access-hsbbj\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.334871 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.334908 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-registry-certificates\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.334931 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-trusted-ca\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.334960 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-bound-sa-token\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.335707 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.336255 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-trusted-ca\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.336265 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-registry-certificates\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.340318 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.340361 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-registry-tls\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.352503 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsbbj\" (UniqueName: \"kubernetes.io/projected/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-kube-api-access-hsbbj\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.353948 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e-bound-sa-token\") pod \"image-registry-66df7c8f76-lklth\" (UID: \"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e\") " pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.543168 4733 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:19 crc kubenswrapper[4733]: I1213 07:10:19.944974 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lklth"] Dec 13 07:10:19 crc kubenswrapper[4733]: W1213 07:10:19.954302 4733 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e00fa2e_7b19_46fc_a2a9_9b3ad2996d6e.slice/crio-adbb5345af2fdf1062cce633b561dd6b48cbb756c54cb677e9c25e7848f2b378 WatchSource:0}: Error finding container adbb5345af2fdf1062cce633b561dd6b48cbb756c54cb677e9c25e7848f2b378: Status 404 returned error can't find the container with id adbb5345af2fdf1062cce633b561dd6b48cbb756c54cb677e9c25e7848f2b378 Dec 13 07:10:20 crc kubenswrapper[4733]: I1213 07:10:20.054197 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lklth" event={"ID":"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e","Type":"ContainerStarted","Data":"adbb5345af2fdf1062cce633b561dd6b48cbb756c54cb677e9c25e7848f2b378"} Dec 13 07:10:21 crc kubenswrapper[4733]: I1213 07:10:21.067881 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lklth" event={"ID":"8e00fa2e-7b19-46fc-a2a9-9b3ad2996d6e","Type":"ContainerStarted","Data":"4dd5ebcf99411df809be537c6894530bb7bca1c1887bb4722f7ca81240649da2"} Dec 13 07:10:21 crc kubenswrapper[4733]: I1213 07:10:21.069411 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:21 crc kubenswrapper[4733]: I1213 07:10:21.087504 4733 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-lklth" podStartSLOduration=2.087486001 podStartE2EDuration="2.087486001s" podCreationTimestamp="2025-12-13 07:10:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-13 07:10:21.085922256 +0000 UTC m=+365.697152739" watchObservedRunningTime="2025-12-13 07:10:21.087486001 +0000 UTC m=+365.698716484" Dec 13 07:10:21 crc kubenswrapper[4733]: I1213 07:10:21.957856 4733 patch_prober.go:28] interesting pod/machine-config-daemon-d4w5t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:10:21 crc kubenswrapper[4733]: I1213 07:10:21.957918 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:10:39 crc kubenswrapper[4733]: I1213 07:10:39.549834 4733 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-lklth" Dec 13 07:10:39 crc kubenswrapper[4733]: I1213 07:10:39.606038 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-278t5"] Dec 13 07:10:51 crc kubenswrapper[4733]: I1213 07:10:51.957510 4733 patch_prober.go:28] interesting pod/machine-config-daemon-d4w5t container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:10:51 crc kubenswrapper[4733]: I1213 07:10:51.958140 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:11:04 crc kubenswrapper[4733]: I1213 07:11:04.646833 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" podUID="64178ade-8a25-4622-91a4-d09ae57fadf1" containerName="registry" containerID="cri-o://5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8" gracePeriod=30 Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.035091 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.073493 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/64178ade-8a25-4622-91a4-d09ae57fadf1-installation-pull-secrets\") pod \"64178ade-8a25-4622-91a4-d09ae57fadf1\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.073724 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"64178ade-8a25-4622-91a4-d09ae57fadf1\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.073799 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-tls\") pod \"64178ade-8a25-4622-91a4-d09ae57fadf1\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.073833 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-bound-sa-token\") pod \"64178ade-8a25-4622-91a4-d09ae57fadf1\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.073886 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-trusted-ca\") pod \"64178ade-8a25-4622-91a4-d09ae57fadf1\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.073936 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6552x\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-kube-api-access-6552x\") pod \"64178ade-8a25-4622-91a4-d09ae57fadf1\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.073961 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-certificates\") pod \"64178ade-8a25-4622-91a4-d09ae57fadf1\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.073990 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/64178ade-8a25-4622-91a4-d09ae57fadf1-ca-trust-extracted\") pod \"64178ade-8a25-4622-91a4-d09ae57fadf1\" (UID: \"64178ade-8a25-4622-91a4-d09ae57fadf1\") " Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.074661 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "64178ade-8a25-4622-91a4-d09ae57fadf1" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.074746 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "64178ade-8a25-4622-91a4-d09ae57fadf1" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.080863 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "64178ade-8a25-4622-91a4-d09ae57fadf1" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.081352 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "64178ade-8a25-4622-91a4-d09ae57fadf1" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.081563 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64178ade-8a25-4622-91a4-d09ae57fadf1-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "64178ade-8a25-4622-91a4-d09ae57fadf1" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.081683 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-kube-api-access-6552x" (OuterVolumeSpecName: "kube-api-access-6552x") pod "64178ade-8a25-4622-91a4-d09ae57fadf1" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1"). InnerVolumeSpecName "kube-api-access-6552x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.082294 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "64178ade-8a25-4622-91a4-d09ae57fadf1" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.093044 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64178ade-8a25-4622-91a4-d09ae57fadf1-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "64178ade-8a25-4622-91a4-d09ae57fadf1" (UID: "64178ade-8a25-4622-91a4-d09ae57fadf1"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.175133 4733 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.175183 4733 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.175194 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6552x\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-kube-api-access-6552x\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.175205 4733 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.175262 4733 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/64178ade-8a25-4622-91a4-d09ae57fadf1-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.175274 4733 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/64178ade-8a25-4622-91a4-d09ae57fadf1-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.175285 4733 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/64178ade-8a25-4622-91a4-d09ae57fadf1-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.316329 4733 generic.go:334] "Generic (PLEG): container finished" podID="64178ade-8a25-4622-91a4-d09ae57fadf1" containerID="5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8" exitCode=0 Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.316398 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.316391 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" event={"ID":"64178ade-8a25-4622-91a4-d09ae57fadf1","Type":"ContainerDied","Data":"5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8"} Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.316451 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-278t5" event={"ID":"64178ade-8a25-4622-91a4-d09ae57fadf1","Type":"ContainerDied","Data":"d5eae76ba1613800ac612f57c8b8281e3138813337b0f7ac7756d921c2e90c56"} Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.316470 4733 scope.go:117] "RemoveContainer" containerID="5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.333533 4733 scope.go:117] "RemoveContainer" containerID="5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8" Dec 13 07:11:05 crc kubenswrapper[4733]: E1213 07:11:05.333907 4733 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8\": container with ID starting with 5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8 not found: ID does not exist" containerID="5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.333937 4733 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8"} err="failed to get container status \"5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8\": rpc error: code = NotFound desc = could not find container \"5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8\": container with ID starting with 5972a42167d97f994f7dc3618699fb202c82796cc175f858bcfb6d6652271af8 not found: ID does not exist" Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.349405 4733 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-278t5"] Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.353328 4733 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-278t5"] Dec 13 07:11:05 crc kubenswrapper[4733]: I1213 07:11:05.605328 4733 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64178ade-8a25-4622-91a4-d09ae57fadf1" path="/var/lib/kubelet/pods/64178ade-8a25-4622-91a4-d09ae57fadf1/volumes" Dec 13 07:11:21 crc kubenswrapper[4733]: I1213 07:11:21.957146 4733 patch_prober.go:28] interesting pod/machine-config-daemon-d4w5t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:11:21 crc kubenswrapper[4733]: I1213 07:11:21.957752 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:11:21 crc 
kubenswrapper[4733]: I1213 07:11:21.957798 4733 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:11:21 crc kubenswrapper[4733]: I1213 07:11:21.958467 4733 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3dbb50ccd329147a33d48adc785b7031a5cf7ea0a4b7160ebd43f87c292b72dd"} pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 07:11:21 crc kubenswrapper[4733]: I1213 07:11:21.958521 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" containerID="cri-o://3dbb50ccd329147a33d48adc785b7031a5cf7ea0a4b7160ebd43f87c292b72dd" gracePeriod=600 Dec 13 07:11:22 crc kubenswrapper[4733]: I1213 07:11:22.410715 4733 generic.go:334] "Generic (PLEG): container finished" podID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerID="3dbb50ccd329147a33d48adc785b7031a5cf7ea0a4b7160ebd43f87c292b72dd" exitCode=0 Dec 13 07:11:22 crc kubenswrapper[4733]: I1213 07:11:22.410819 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" event={"ID":"fa175c9e-efff-4835-8b54-c98307e46f0b","Type":"ContainerDied","Data":"3dbb50ccd329147a33d48adc785b7031a5cf7ea0a4b7160ebd43f87c292b72dd"} Dec 13 07:11:22 crc kubenswrapper[4733]: I1213 07:11:22.411154 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" event={"ID":"fa175c9e-efff-4835-8b54-c98307e46f0b","Type":"ContainerStarted","Data":"b8abd4204613056f61f6addf9d7a0e01486d89a8714bb50c039beccc5c10ab1f"} Dec 13 07:11:22 crc kubenswrapper[4733]: I1213 07:11:22.411188 4733 scope.go:117] "RemoveContainer" containerID="e27de6f1734a306a7ad31c9bfa2af341011845789f76d079d44f31349da66f0c" Dec 13 07:13:51 crc kubenswrapper[4733]: I1213 07:13:51.956980 4733 patch_prober.go:28] interesting pod/machine-config-daemon-d4w5t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:13:51 crc kubenswrapper[4733]: I1213 07:13:51.958202 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:14:21 crc kubenswrapper[4733]: I1213 07:14:21.956893 4733 patch_prober.go:28] interesting pod/machine-config-daemon-d4w5t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:14:21 crc kubenswrapper[4733]: I1213 07:14:21.957634 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:14:51 crc kubenswrapper[4733]: I1213 07:14:51.957922 4733 patch_prober.go:28] interesting pod/machine-config-daemon-d4w5t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 13 07:14:51 crc kubenswrapper[4733]: I1213 07:14:51.958709 4733 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 13 07:14:51 crc kubenswrapper[4733]: I1213 07:14:51.958770 4733 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" Dec 13 07:14:51 crc kubenswrapper[4733]: I1213 07:14:51.959411 4733 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b8abd4204613056f61f6addf9d7a0e01486d89a8714bb50c039beccc5c10ab1f"} pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 13 07:14:51 crc kubenswrapper[4733]: I1213 07:14:51.959473 4733 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" podUID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerName="machine-config-daemon" containerID="cri-o://b8abd4204613056f61f6addf9d7a0e01486d89a8714bb50c039beccc5c10ab1f" gracePeriod=600 Dec 13 07:14:52 crc kubenswrapper[4733]: I1213 07:14:52.560068 4733 generic.go:334] "Generic (PLEG): container finished" podID="fa175c9e-efff-4835-8b54-c98307e46f0b" containerID="b8abd4204613056f61f6addf9d7a0e01486d89a8714bb50c039beccc5c10ab1f" exitCode=0 Dec 13 07:14:52 crc kubenswrapper[4733]: I1213 07:14:52.560147 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" event={"ID":"fa175c9e-efff-4835-8b54-c98307e46f0b","Type":"ContainerDied","Data":"b8abd4204613056f61f6addf9d7a0e01486d89a8714bb50c039beccc5c10ab1f"} Dec 13 07:14:52 crc kubenswrapper[4733]: I1213 07:14:52.560660 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4w5t" event={"ID":"fa175c9e-efff-4835-8b54-c98307e46f0b","Type":"ContainerStarted","Data":"ed31732b8c86ee011f7231aa08aea9334fff9c1ffea2469850ff614fa687ddf9"} Dec 13 07:14:52 crc kubenswrapper[4733]: I1213 07:14:52.560680 4733 scope.go:117] "RemoveContainer" containerID="3dbb50ccd329147a33d48adc785b7031a5cf7ea0a4b7160ebd43f87c292b72dd" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.174261 4733 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22"] Dec 13 07:15:00 crc kubenswrapper[4733]: E1213 07:15:00.175146 4733 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64178ade-8a25-4622-91a4-d09ae57fadf1" containerName="registry" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.175165 4733 state_mem.go:107] "Deleted CPUSet assignment" podUID="64178ade-8a25-4622-91a4-d09ae57fadf1" containerName="registry" Dec 
13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.175301 4733 memory_manager.go:354] "RemoveStaleState removing state" podUID="64178ade-8a25-4622-91a4-d09ae57fadf1" containerName="registry" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.175785 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.178754 4733 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.180386 4733 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.182720 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22"] Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.197690 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4113bb6-b683-400f-94bd-1b38c2454a1a-config-volume\") pod \"collect-profiles-29426835-sjc22\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.197823 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f4113bb6-b683-400f-94bd-1b38c2454a1a-secret-volume\") pod \"collect-profiles-29426835-sjc22\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.197882 4733 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs77x\" (UniqueName: \"kubernetes.io/projected/f4113bb6-b683-400f-94bd-1b38c2454a1a-kube-api-access-gs77x\") pod \"collect-profiles-29426835-sjc22\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.298746 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4113bb6-b683-400f-94bd-1b38c2454a1a-config-volume\") pod \"collect-profiles-29426835-sjc22\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.299051 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f4113bb6-b683-400f-94bd-1b38c2454a1a-secret-volume\") pod \"collect-profiles-29426835-sjc22\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.299088 4733 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs77x\" (UniqueName: \"kubernetes.io/projected/f4113bb6-b683-400f-94bd-1b38c2454a1a-kube-api-access-gs77x\") pod \"collect-profiles-29426835-sjc22\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.300059 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4113bb6-b683-400f-94bd-1b38c2454a1a-config-volume\") pod \"collect-profiles-29426835-sjc22\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.305205 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f4113bb6-b683-400f-94bd-1b38c2454a1a-secret-volume\") pod \"collect-profiles-29426835-sjc22\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.315104 4733 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs77x\" (UniqueName: \"kubernetes.io/projected/f4113bb6-b683-400f-94bd-1b38c2454a1a-kube-api-access-gs77x\") pod \"collect-profiles-29426835-sjc22\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.500359 4733 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:00 crc kubenswrapper[4733]: I1213 07:15:00.696993 4733 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22"] Dec 13 07:15:01 crc kubenswrapper[4733]: I1213 07:15:01.611532 4733 generic.go:334] "Generic (PLEG): container finished" podID="f4113bb6-b683-400f-94bd-1b38c2454a1a" containerID="d0730605ef0d645be71e721abfec814326d6dd157c3628f3e896f77b8c76c3c3" exitCode=0 Dec 13 07:15:01 crc kubenswrapper[4733]: I1213 07:15:01.611596 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" event={"ID":"f4113bb6-b683-400f-94bd-1b38c2454a1a","Type":"ContainerDied","Data":"d0730605ef0d645be71e721abfec814326d6dd157c3628f3e896f77b8c76c3c3"} Dec 13 07:15:01 crc kubenswrapper[4733]: I1213 07:15:01.611837 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" event={"ID":"f4113bb6-b683-400f-94bd-1b38c2454a1a","Type":"ContainerStarted","Data":"794f1604760d6d8431eb4b7c2f4667d3b98acb286efca80aecd133b438f0a385"} Dec 13 07:15:02 crc kubenswrapper[4733]: I1213 07:15:02.812349 4733 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" Dec 13 07:15:02 crc kubenswrapper[4733]: I1213 07:15:02.933273 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f4113bb6-b683-400f-94bd-1b38c2454a1a-secret-volume\") pod \"f4113bb6-b683-400f-94bd-1b38c2454a1a\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " Dec 13 07:15:02 crc kubenswrapper[4733]: I1213 07:15:02.933347 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4113bb6-b683-400f-94bd-1b38c2454a1a-config-volume\") pod \"f4113bb6-b683-400f-94bd-1b38c2454a1a\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " Dec 13 07:15:02 crc kubenswrapper[4733]: I1213 07:15:02.933403 4733 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gs77x\" (UniqueName: \"kubernetes.io/projected/f4113bb6-b683-400f-94bd-1b38c2454a1a-kube-api-access-gs77x\") pod \"f4113bb6-b683-400f-94bd-1b38c2454a1a\" (UID: \"f4113bb6-b683-400f-94bd-1b38c2454a1a\") " Dec 13 07:15:02 crc kubenswrapper[4733]: I1213 07:15:02.934057 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4113bb6-b683-400f-94bd-1b38c2454a1a-config-volume" (OuterVolumeSpecName: "config-volume") pod "f4113bb6-b683-400f-94bd-1b38c2454a1a" (UID: "f4113bb6-b683-400f-94bd-1b38c2454a1a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 13 07:15:02 crc kubenswrapper[4733]: I1213 07:15:02.938518 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4113bb6-b683-400f-94bd-1b38c2454a1a-kube-api-access-gs77x" (OuterVolumeSpecName: "kube-api-access-gs77x") pod "f4113bb6-b683-400f-94bd-1b38c2454a1a" (UID: "f4113bb6-b683-400f-94bd-1b38c2454a1a"). InnerVolumeSpecName "kube-api-access-gs77x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 13 07:15:02 crc kubenswrapper[4733]: I1213 07:15:02.938712 4733 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4113bb6-b683-400f-94bd-1b38c2454a1a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f4113bb6-b683-400f-94bd-1b38c2454a1a" (UID: "f4113bb6-b683-400f-94bd-1b38c2454a1a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 13 07:15:03 crc kubenswrapper[4733]: I1213 07:15:03.034773 4733 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f4113bb6-b683-400f-94bd-1b38c2454a1a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 13 07:15:03 crc kubenswrapper[4733]: I1213 07:15:03.034818 4733 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f4113bb6-b683-400f-94bd-1b38c2454a1a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 13 07:15:03 crc kubenswrapper[4733]: I1213 07:15:03.034837 4733 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gs77x\" (UniqueName: \"kubernetes.io/projected/f4113bb6-b683-400f-94bd-1b38c2454a1a-kube-api-access-gs77x\") on node \"crc\" DevicePath \"\"" Dec 13 07:15:03 crc kubenswrapper[4733]: I1213 07:15:03.621951 4733 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" event={"ID":"f4113bb6-b683-400f-94bd-1b38c2454a1a","Type":"ContainerDied","Data":"794f1604760d6d8431eb4b7c2f4667d3b98acb286efca80aecd133b438f0a385"} Dec 13 07:15:03 crc kubenswrapper[4733]: I1213 07:15:03.622001 4733 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="794f1604760d6d8431eb4b7c2f4667d3b98acb286efca80aecd133b438f0a385" Dec 13 07:15:03 crc kubenswrapper[4733]: I1213 07:15:03.622042 4733 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29426835-sjc22" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515117211251024441 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015117211252017357 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015117207501016504 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015117207501015454 5ustar corecore